Nov 30 06:46:18 crc systemd[1]: Starting Kubernetes Kubelet...
Nov 30 06:46:18 crc restorecon[4750]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 30 06:46:18 crc restorecon[4750]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 30 06:46:18 crc 
restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 30 06:46:18 crc 
restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 
06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 30 06:46:18 crc 
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:18 crc restorecon[4750]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:18 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 30 06:46:19 crc restorecon[4750]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 30 06:46:19 crc restorecon[4750]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Nov 30 06:46:19 crc kubenswrapper[4941]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 30 06:46:19 crc kubenswrapper[4941]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Nov 30 06:46:19 crc kubenswrapper[4941]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 30 06:46:19 crc kubenswrapper[4941]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Nov 30 06:46:19 crc kubenswrapper[4941]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Nov 30 06:46:19 crc kubenswrapper[4941]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.338368 4941 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341235 4941 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341253 4941 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341258 4941 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341261 4941 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341265 4941 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341269 4941 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341273 4941 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341277 4941 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341281 4941 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341285 4941 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341289 4941 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341294 4941 feature_gate.go:330] unrecognized feature gate: OVNObservability
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341298 4941 feature_gate.go:330] unrecognized feature gate: SignatureStores
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341301 4941 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341305 4941 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341309 4941 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341313 4941 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341316 4941 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341320 4941 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341335 4941 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341339 4941 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341342 4941 feature_gate.go:330] unrecognized feature gate: NewOLM
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341346 4941 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341350 4941 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341353 4941 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341357 4941 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341360 4941 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341364 4941 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341369 4941 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341374 4941 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341378 4941 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341382 4941 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341387 4941 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341392 4941 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341405 4941 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341410 4941 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341415 4941 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341419 4941 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341424 4941 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341428 4941 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341432 4941 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341437 4941 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341442 4941 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341447 4941 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341451 4941 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341454 4941 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341458 4941 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341462 4941 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341466 4941 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341470 4941 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341473 4941 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341477 4941 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341480 4941 feature_gate.go:330] unrecognized feature gate: Example
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341484 4941 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341487 4941 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341491 4941 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341494 4941 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341498 4941 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341502 4941 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341507 4941 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341510 4941 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341514 4941 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341517 4941 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341522 4941 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341525 4941 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341528 4941 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341532 4941 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341535 4941 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341538 4941 feature_gate.go:330] unrecognized feature gate: PinnedImages
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341542 4941 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.341547 4941 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341662 4941 flags.go:64] FLAG: --address="0.0.0.0"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341675 4941 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341684 4941 flags.go:64] FLAG: --anonymous-auth="true"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341692 4941 flags.go:64] FLAG: --application-metrics-count-limit="100"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341707 4941 flags.go:64] FLAG: --authentication-token-webhook="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341715 4941 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341725 4941 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341732 4941 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341739 4941 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341745 4941 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341750 4941 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341756 4941 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341762 4941 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341768 4941 flags.go:64] FLAG: --cgroup-root=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341772 4941 flags.go:64] FLAG: --cgroups-per-qos="true"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341778 4941 flags.go:64] FLAG: --client-ca-file=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341782 4941 flags.go:64] FLAG: --cloud-config=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341788 4941 flags.go:64] FLAG: --cloud-provider=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341792 4941 flags.go:64] FLAG: --cluster-dns="[]"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341800 4941 flags.go:64] FLAG: --cluster-domain=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341806 4941 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341811 4941 flags.go:64] FLAG: --config-dir=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341816 4941 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341823 4941 flags.go:64] FLAG: --container-log-max-files="5"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341831 4941 flags.go:64] FLAG: --container-log-max-size="10Mi"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341837 4941 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341842 4941 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341848 4941 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341852 4941 flags.go:64] FLAG: --contention-profiling="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341857 4941 flags.go:64] FLAG: --cpu-cfs-quota="true"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341861 4941 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341866 4941 flags.go:64] FLAG: --cpu-manager-policy="none"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341870 4941 flags.go:64] FLAG: --cpu-manager-policy-options=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341876 4941 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341881 4941 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341886 4941 flags.go:64] FLAG: --enable-debugging-handlers="true"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341892 4941 flags.go:64] FLAG: --enable-load-reader="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341896 4941 flags.go:64] FLAG: --enable-server="true"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341901 4941 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341908 4941 flags.go:64] FLAG: --event-burst="100"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341912 4941 flags.go:64] FLAG: --event-qps="50"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341916 4941 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341921 4941 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341925 4941 flags.go:64] FLAG: --eviction-hard=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341931 4941 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341935 4941 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341940 4941 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341944 4941 flags.go:64] FLAG: --eviction-soft=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341949 4941 flags.go:64] FLAG: --eviction-soft-grace-period=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341953 4941 flags.go:64] FLAG: --exit-on-lock-contention="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341958 4941 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341962 4941 flags.go:64] FLAG: --experimental-mounter-path=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341966 4941 flags.go:64] FLAG: --fail-cgroupv1="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341970 4941 flags.go:64] FLAG: --fail-swap-on="true"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341975 4941 flags.go:64] FLAG: --feature-gates=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341981 4941 flags.go:64] FLAG: --file-check-frequency="20s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341985 4941 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341990 4941 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341994 4941 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.341998 4941 flags.go:64] FLAG: --healthz-port="10248"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342003 4941 flags.go:64] FLAG: --help="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342008 4941 flags.go:64] FLAG: --hostname-override=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342012 4941 flags.go:64] FLAG: --housekeeping-interval="10s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342016 4941 flags.go:64] FLAG: --http-check-frequency="20s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342021 4941 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342025 4941 flags.go:64] FLAG: --image-credential-provider-config=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342029 4941 flags.go:64] FLAG: --image-gc-high-threshold="85"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342034 4941 flags.go:64] FLAG: --image-gc-low-threshold="80"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342038 4941 flags.go:64] FLAG: --image-service-endpoint=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342043 4941 flags.go:64] FLAG: --kernel-memcg-notification="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342048 4941 flags.go:64] FLAG: --kube-api-burst="100"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342053 4941 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342058 4941 flags.go:64] FLAG: --kube-api-qps="50"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342063 4941 flags.go:64] FLAG: --kube-reserved=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342068 4941 flags.go:64] FLAG: --kube-reserved-cgroup=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342072 4941 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342076 4941 flags.go:64] FLAG: --kubelet-cgroups=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342081 4941 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342085 4941 flags.go:64] FLAG: --lock-file=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342089 4941 flags.go:64] FLAG: --log-cadvisor-usage="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342093 4941 flags.go:64] FLAG: --log-flush-frequency="5s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342098 4941 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342104 4941 flags.go:64] FLAG: --log-json-split-stream="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342109 4941 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342114 4941 flags.go:64] FLAG: --log-text-split-stream="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342118 4941 flags.go:64] FLAG: --logging-format="text"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342122 4941 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342127 4941 flags.go:64] FLAG: --make-iptables-util-chains="true"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342132 4941 flags.go:64] FLAG: --manifest-url=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342136 4941 flags.go:64] FLAG: --manifest-url-header=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342143 4941 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342148 4941 flags.go:64] FLAG: --max-open-files="1000000"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342154 4941 flags.go:64] FLAG: --max-pods="110"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342158 4941 flags.go:64] FLAG: --maximum-dead-containers="-1"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342162 4941 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342167 4941 flags.go:64] FLAG: --memory-manager-policy="None"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342171 4941 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342177 4941 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342188 4941 flags.go:64] FLAG: --node-ip="192.168.126.11"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342195 4941 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342210 4941 flags.go:64] FLAG: --node-status-max-images="50"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342216 4941 flags.go:64] FLAG: --node-status-update-frequency="10s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342222 4941 flags.go:64] FLAG: --oom-score-adj="-999"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342227 4941 flags.go:64] FLAG: --pod-cidr=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342232 4941 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342239 4941 flags.go:64] FLAG: --pod-manifest-path=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342243 4941 flags.go:64] FLAG: --pod-max-pids="-1"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342249 4941 flags.go:64] FLAG: --pods-per-core="0"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342254 4941 flags.go:64] FLAG: --port="10250"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342259 4941 flags.go:64] FLAG: --protect-kernel-defaults="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342263 4941 flags.go:64] FLAG: --provider-id=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342267 4941 flags.go:64] FLAG: --qos-reserved=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342272 4941 flags.go:64] FLAG: --read-only-port="10255"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342277 4941 flags.go:64] FLAG: --register-node="true"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342281 4941 flags.go:64] FLAG: --register-schedulable="true"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342286 4941 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342302 4941 flags.go:64] FLAG: --registry-burst="10"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342306 4941 flags.go:64] FLAG: --registry-qps="5"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342311 4941 flags.go:64] FLAG: --reserved-cpus=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342319 4941 flags.go:64] FLAG: --reserved-memory=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342341 4941 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342346 4941 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342351 4941 flags.go:64] FLAG: --rotate-certificates="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342355 4941 flags.go:64] FLAG: --rotate-server-certificates="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342360 4941 flags.go:64] FLAG: --runonce="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342365 4941 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342369 4941 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342374 4941 flags.go:64] FLAG: --seccomp-default="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342378 4941 flags.go:64] FLAG: --serialize-image-pulls="true"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342383 4941 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342387 4941 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342392 4941 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342397 4941 flags.go:64] FLAG: --storage-driver-password="root"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342401 4941 flags.go:64] FLAG: --storage-driver-secure="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342406 4941 flags.go:64] FLAG: --storage-driver-table="stats"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342410 4941 flags.go:64] FLAG: --storage-driver-user="root"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342415 4941 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342420 4941 flags.go:64] FLAG: --sync-frequency="1m0s"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342425 4941 flags.go:64] FLAG: --system-cgroups=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342429 4941 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342436 4941 flags.go:64] FLAG: --system-reserved-cgroup=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342440 4941 flags.go:64] FLAG: --tls-cert-file=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342444 4941 flags.go:64] FLAG: --tls-cipher-suites="[]"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342453 4941 flags.go:64] FLAG: --tls-min-version=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342457 4941 flags.go:64] FLAG: --tls-private-key-file=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342461 4941 flags.go:64] FLAG: --topology-manager-policy="none"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342465 4941 flags.go:64] FLAG: --topology-manager-policy-options=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342469 4941 flags.go:64] FLAG: --topology-manager-scope="container"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342474 4941 flags.go:64] FLAG: --v="2"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342485 4941 flags.go:64] FLAG: --version="false"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342491 4941 flags.go:64] FLAG: --vmodule=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342498 4941 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.342503 4941 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342632 4941 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342640 4941 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342645 4941 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342657 4941 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342663 4941 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342667 4941 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342687 4941 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342693 4941 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342697 4941 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342702 4941 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342706 4941 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342711 4941 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342715 4941 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342719 4941 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342722 4941 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342726 4941 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342730 4941 feature_gate.go:330] unrecognized feature gate: SignatureStores
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342734 4941 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342737 4941 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342741 4941 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342745 4941 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342750 4941 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342755 4941 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342760 4941 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342764 4941 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342768 4941 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342773 4941 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342777 4941 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342781 4941 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342785 4941 feature_gate.go:330] unrecognized feature gate: PinnedImages
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342791 4941 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342795 4941 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342799 4941 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342803 4941 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342807 4941 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342811 4941 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342815 4941 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342818 4941 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342822 4941 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342827 4941 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342831 4941 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342835 4941 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342839 4941 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342843 4941 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342847 4941 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342851 4941 feature_gate.go:330] unrecognized feature gate: NewOLM
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342855 4941 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342858 4941 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342862 4941 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342866 4941 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342869 4941 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342873 4941 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342877 4941 feature_gate.go:330] unrecognized feature gate: OVNObservability
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342880 4941 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342884 4941 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342888 4941 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342892 4941 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342895 4941 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342899 4941 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342902 4941 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342906 4941 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342910 4941 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342916 4941 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342920 4941 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342924 4941 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342928 4941 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342933 4941 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342937 4941 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342942 4941 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342946 4941 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.342950 4941 feature_gate.go:330] unrecognized feature gate: Example
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.343123 4941 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.351934 4941 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.351982 4941 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352076 4941 feature_gate.go:330] unrecognized feature gate: SignatureStores
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352093 4941 feature_gate.go:330] unrecognized feature gate: NewOLM
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352100 4941 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352106 4941 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352112 4941 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352117 4941 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352122 4941 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352128 4941 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352133 4941 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352138 4941 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352145 4941 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352155 4941 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352162 4941 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352168 4941 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352174 4941 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352179 4941 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352185 4941 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352190 4941 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352195 4941 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352200 4941 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352205 4941 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352210 4941 feature_gate.go:330] unrecognized feature gate: PinnedImages
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352216 4941 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352221 4941 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352257 4941 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352265 4941 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352273 4941 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352279 4941 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352284 4941 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352289 4941 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352294 4941 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352298 4941 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352303 4941 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352308 4941 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352315 4941 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352341 4941 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352349 4941 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352356 4941 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352362 4941 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352367 4941 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352373 4941 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352378 4941 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352384 4941 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352389 4941 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352394 4941 feature_gate.go:330] unrecognized feature gate: OVNObservability
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352400 4941 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352405 4941 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352409 4941 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352415 4941 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352419 4941 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352425 4941 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352430 4941 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352434 4941 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352439 4941 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352444 4941 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352450 4941 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352455 4941 feature_gate.go:330] unrecognized feature gate: Example
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352459 4941 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352464 4941 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352469 4941 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352474 4941 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352479 4941 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352484 4941 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352489 4941 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352494 4941 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352499 4941 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352504 4941 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352509 4941 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352514 4941 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352518 4941 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352524 4941 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.352534 4941 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352677 4941 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352687 4941 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352693 4941 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352699 4941 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352704 4941 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352711 4941 feature_gate.go:330] unrecognized feature gate: NewOLM
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352716 4941 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352722 4941 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352727 4941 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352732 4941 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352737 4941 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352742 4941 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352747 4941 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352751 4941 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352756 4941 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352761 4941 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352766 4941 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352771 4941 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352776 4941 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352781 4941 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352786 4941 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352791 4941 feature_gate.go:330] unrecognized feature gate: Example
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352795 4941 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352800 4941 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352805 4941 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352810 4941 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352816 4941 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352822 4941 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352827 4941 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352832 4941 feature_gate.go:330] unrecognized feature gate: OVNObservability
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352837 4941 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352842 4941 feature_gate.go:330] unrecognized feature gate: PinnedImages
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352847 4941 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352853 4941 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352859 4941 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352865 4941 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352870 4941 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352878 4941 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352884 4941 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352889 4941 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352894 4941 feature_gate.go:330] unrecognized feature gate: SignatureStores
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352899 4941 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352904 4941 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352909 4941 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352914 4941 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352918 4941 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352923 4941 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352928 4941 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352936 4941 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352941 4941 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352947 4941 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352952 4941 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352957 4941 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352962 4941 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352966 4941 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352971 4941 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352976 4941 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352981 4941 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352986 4941 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352991 4941 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.352995 4941 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.353000 4941 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.353005 4941 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.353010 4941 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.353016 4941 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.353021 4941 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.353026 4941 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.353031 4941 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.353037 4941 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.353042 4941 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.353047 4941 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.353056 4941 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.353544 4941 server.go:940] "Client rotation is on, will bootstrap in background"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.359668 4941 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.359873 4941 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.360933 4941 server.go:997] "Starting client certificate rotation"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.360985 4941 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.361921 4941 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-10 19:52:59.595532359 +0000 UTC
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.362033 4941 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 253h6m40.233506231s for next certificate rotation
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.368596 4941 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.371361 4941 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.381791 4941 log.go:25] "Validated CRI v1 runtime API"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.404405 4941 log.go:25] "Validated CRI v1 image API"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.406547 4941 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.410498 4941 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-11-30-06-41-23-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.410572 4941 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}]
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.438059 4941 manager.go:217] Machine: {Timestamp:2025-11-30 06:46:19.435442162 +0000 UTC m=+0.203613861 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654132736 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:f889fd33-4d25-4c91-a88f-22995726b30c BootID:105cacd9-725d-42a6-8952-62e3f6b69189 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730829824 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4
DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827068416 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:73:6e:c0 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:73:6e:c0 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:35:8e:27 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:f6:95:09 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:f2:39:29 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:e2:62:f9 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:e1:14:bc Speed:-1 Mtu:1496} {Name:eth10 MacAddress:da:b5:5f:28:0b:9c Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:d6:bf:21:76:e5:4f Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654132736 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 
Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.438551 4941 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.438841 4941 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.439683 4941 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.439994 4941 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.440065 4941 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.440506 4941 topology_manager.go:138] "Creating topology manager 
with none policy" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.440526 4941 container_manager_linux.go:303] "Creating device plugin manager" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.440829 4941 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.440896 4941 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.441263 4941 state_mem.go:36] "Initialized new in-memory state store" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.441441 4941 server.go:1245] "Using root directory" path="/var/lib/kubelet" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.442650 4941 kubelet.go:418] "Attempting to sync node with API server" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.442694 4941 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.442755 4941 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.442784 4941 kubelet.go:324] "Adding apiserver pod source" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.442809 4941 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.446312 4941 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.447273 4941 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.448106 4941 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.107:6443: connect: connection refused
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.447455 4941 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.107:6443: connect: connection refused
Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.448374 4941 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.107:6443: connect: connection refused" logger="UnhandledError"
Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.448419 4941 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.107:6443: connect: connection refused" logger="UnhandledError"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.448973 4941 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.449706 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.449741 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.449754 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.449766 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.449783 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.449796 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.449807 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.449827 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.449841 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.449853 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.449872 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.449885 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.450147 4941 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.450827 4941 server.go:1280] "Started kubelet"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.451279 4941 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.451280 4941 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.451891 4941 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.452146 4941 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.107:6443: connect: connection refused
Nov 30 06:46:19 crc systemd[1]: Started Kubernetes Kubelet.
Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.453233 4941 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.107:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187cb5ec0c9afd89 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-30 06:46:19.450785161 +0000 UTC m=+0.218956780,LastTimestamp:2025-11-30 06:46:19.450785161 +0000 UTC m=+0.218956780,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.454107 4941 server.go:460] "Adding debug handlers to kubelet server"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.455140 4941 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.455202 4941 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.455264 4941 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 19:38:09.736211102 +0000 UTC
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.455346 4941 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1116h51m50.280888728s for next certificate rotation
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.455749 4941 volume_manager.go:287] "The desired_state_of_world populator starts"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.455778 4941 volume_manager.go:289] "Starting Kubelet Volume Manager"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.455899 4941 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.456458 4941 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.456949 4941 factory.go:55] Registering systemd factory
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.456993 4941 factory.go:221] Registration of the systemd container factory successfully
Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.457105 4941 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" interval="200ms"
Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.457194 4941 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.107:6443: connect: connection refused
Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.457264 4941 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.107:6443: connect: connection refused" logger="UnhandledError"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.459805 4941 factory.go:153] Registering CRI-O factory
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.459835 4941 factory.go:221] Registration of the crio container factory successfully
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.459902 4941 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.459932 4941 factory.go:103] Registering Raw factory
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.459951 4941 manager.go:1196] Started watching for new ooms in manager
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.460764 4941 manager.go:319] Starting recovery of all containers
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468595 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468681 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468705 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468731 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468751 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468771 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468796 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468816 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468844 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468872 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468898 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468918 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468941 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468974 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.468999 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469025 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469045 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469074 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469092 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469110 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469132 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469150 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469172 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469190 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469210 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469234 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469268 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469292 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469315 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469370 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469532 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469571 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469622 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469638 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469660 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469674 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469689 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469709 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469725 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469748 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469763 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469776 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469794 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469807 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469827 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469843 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469858 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469877 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469892 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469908 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469922 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469936 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469965 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.469987 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.470010 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.470026 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.470049 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.470062 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.470080 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.470093 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472096 4941 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472158 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472223 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472258 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472281 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472311 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472365 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472390 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472421 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472442 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472465 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472492 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472516 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472544 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472567 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472633 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472673 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472706 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472736 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.472759 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474625 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474696 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474731 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474747 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474760 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474771 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474784 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474798 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474809 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474821 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474836 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474851 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474863 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474874 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474887 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474898 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474941 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474954 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474965 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474977 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474987 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.474998 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475035 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475047 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475058 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475094 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475109 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475125 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475138 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475151 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475163 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475176 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475190 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475201 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475216 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475230 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475241 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475251 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475263 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475274 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475289 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475300 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475310 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475361 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475372 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475381 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475398 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475410 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475422 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475432 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475446 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475457 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475471 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475482 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475496 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475509 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475524 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475534 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783"
volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475549 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475560 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475572 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475586 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475596 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475610 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475619 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475630 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475639 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475652 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475664 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475676 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475686 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475699 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475708 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475719 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475728 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475738 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475748 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475764 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475774 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475784 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475795 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475808 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475819 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475834 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475851 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475864 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475875 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475960 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475971 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475985 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.475996 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476008 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476019 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476030 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476041 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476053 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476065 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476076 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476087 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476100 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476112 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476123 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476134 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476144 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476155 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476197 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476209 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476220 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476231 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476244 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476255 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476266 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476277 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476288 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476299 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476309 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476337 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476348 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476359 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476550 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476562 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476575 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476588 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476602 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476613 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476628 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476641 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476652 4941 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476662 4941 reconstruct.go:97] "Volume reconstruction finished" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.476671 4941 reconciler.go:26] "Reconciler: start to sync state" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.488038 4941 manager.go:324] Recovery completed Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.497497 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.499111 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.499160 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.499174 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.500535 4941 cpu_manager.go:225] "Starting CPU manager" policy="none" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.500553 4941 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.500578 4941 state_mem.go:36] "Initialized new in-memory state store" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.511395 4941 policy_none.go:49] "None policy: Start" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.512040 4941 memory_manager.go:170] "Starting memorymanager" policy="None" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.512076 4941 state_mem.go:35] "Initializing new in-memory state store" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.517671 4941 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.520239 4941 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.520303 4941 status_manager.go:217] "Starting to sync pod status with apiserver" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.520388 4941 kubelet.go:2335] "Starting kubelet main sync loop" Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.520489 4941 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.521444 4941 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.107:6443: connect: connection refused Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.521555 4941 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.107:6443: connect: connection refused" logger="UnhandledError" Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.556594 4941 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.580094 4941 manager.go:334] "Starting Device Plugin manager" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.580196 4941 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.580217 4941 server.go:79] "Starting device plugin registration server" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.580919 4941 eviction_manager.go:189] "Eviction manager: starting control loop" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.580948 4941 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.581360 4941 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.581479 4941 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.581494 4941 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.588089 4941 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.621087 4941 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.621238 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.622772 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.622829 4941 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.622844 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.623038 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.623257 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.623375 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.624491 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.624540 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.624561 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.624609 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.624648 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.624663 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.624685 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.624901 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.624961 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.630975 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.631003 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.631028 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.631052 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.631056 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.631080 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.631595 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.631705 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.631766 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.633272 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.633301 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.633360 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.633311 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.633384 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.633420 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.633854 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.634003 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.634071 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.635308 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.635359 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.635430 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.635396 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.635441 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.635492 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.635808 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.635874 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.636879 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.636918 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.636935 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.658044 4941 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" interval="400ms" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.679786 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.679834 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.679865 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.679889 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.679914 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.679939 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.679963 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.679990 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.680017 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.680039 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.680062 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.680110 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.680196 4941 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.680259 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.680298 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.681913 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.683026 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.683071 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.683085 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.683117 4941 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.683720 4941 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.107:6443: connect: connection refused" node="crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.781507 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.781620 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.781663 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.781705 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.781745 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.781782 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.781820 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.781854 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.781896 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.781935 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.781972 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782010 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782043 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc 
kubenswrapper[4941]: I1130 06:46:19.782080 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782127 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782566 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782592 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782651 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782669 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782690 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782717 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782767 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782769 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782824 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782788 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782862 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782869 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782919 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782955 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.782986 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.884124 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.885869 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.885931 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.885945 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.885992 4941 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 30 06:46:19 crc kubenswrapper[4941]: E1130 06:46:19.886643 4941 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.107:6443: connect: connection 
refused" node="crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.948131 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.968644 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.973410 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-b09c6f0605cde3b880a0ab43d39aa51a0e0ab66249881501412d51ec5ab58177 WatchSource:0}: Error finding container b09c6f0605cde3b880a0ab43d39aa51a0e0ab66249881501412d51ec5ab58177: Status 404 returned error can't find the container with id b09c6f0605cde3b880a0ab43d39aa51a0e0ab66249881501412d51ec5ab58177 Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.985860 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: W1130 06:46:19.987735 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-34e9bedb04b03a25caff1da82447e19da9c2e52c474961fb9396103914d400f2 WatchSource:0}: Error finding container 34e9bedb04b03a25caff1da82447e19da9c2e52c474961fb9396103914d400f2: Status 404 returned error can't find the container with id 34e9bedb04b03a25caff1da82447e19da9c2e52c474961fb9396103914d400f2 Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.992821 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 30 06:46:19 crc kubenswrapper[4941]: I1130 06:46:19.997922 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:20 crc kubenswrapper[4941]: W1130 06:46:20.009902 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-0bb1431404e08e2fc19686ec80e63c9472be48851a9ac8f298458764325c84cb WatchSource:0}: Error finding container 0bb1431404e08e2fc19686ec80e63c9472be48851a9ac8f298458764325c84cb: Status 404 returned error can't find the container with id 0bb1431404e08e2fc19686ec80e63c9472be48851a9ac8f298458764325c84cb Nov 30 06:46:20 crc kubenswrapper[4941]: W1130 06:46:20.013028 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-2358f08ea468358670e5721190e433f3c0b84d92a5daf41816b46eba231afe4a WatchSource:0}: Error finding container 2358f08ea468358670e5721190e433f3c0b84d92a5daf41816b46eba231afe4a: Status 404 returned error can't find the container with id 2358f08ea468358670e5721190e433f3c0b84d92a5daf41816b46eba231afe4a Nov 30 06:46:20 crc kubenswrapper[4941]: W1130 06:46:20.021129 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-b98906a4c2999f8c77989dc5a2baaaa000b0ad581c549cd90ae7187f4d3f1b08 WatchSource:0}: Error finding container b98906a4c2999f8c77989dc5a2baaaa000b0ad581c549cd90ae7187f4d3f1b08: Status 404 returned error can't find the container with id b98906a4c2999f8c77989dc5a2baaaa000b0ad581c549cd90ae7187f4d3f1b08 Nov 30 06:46:20 crc kubenswrapper[4941]: E1130 06:46:20.059040 4941 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" interval="800ms" Nov 30 06:46:20 crc kubenswrapper[4941]: W1130 06:46:20.276785 4941 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.107:6443: connect: connection refused Nov 30 06:46:20 crc kubenswrapper[4941]: E1130 06:46:20.276874 4941 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.107:6443: connect: connection refused" logger="UnhandledError" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.287130 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.288312 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.288388 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.288400 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.288424 4941 kubelet_node_status.go:76] "Attempting to register node" 
node="crc" Nov 30 06:46:20 crc kubenswrapper[4941]: E1130 06:46:20.288822 4941 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.107:6443: connect: connection refused" node="crc" Nov 30 06:46:20 crc kubenswrapper[4941]: W1130 06:46:20.405664 4941 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.107:6443: connect: connection refused Nov 30 06:46:20 crc kubenswrapper[4941]: E1130 06:46:20.405846 4941 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.107:6443: connect: connection refused" logger="UnhandledError" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.453127 4941 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.107:6443: connect: connection refused Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.530767 4941 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="22c24e0218a3313854727773a84ff6021b749f39e764004422168b99fc67eb0b" exitCode=0 Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.530837 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"22c24e0218a3313854727773a84ff6021b749f39e764004422168b99fc67eb0b"} Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.530956 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2358f08ea468358670e5721190e433f3c0b84d92a5daf41816b46eba231afe4a"} Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.531067 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.531991 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.532027 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.532037 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.533277 4941 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="813da332ad7586030a90f8fce3a25e3f0f73b9cff17160f42ce3c7093ce71c28" exitCode=0 Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.533343 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"813da332ad7586030a90f8fce3a25e3f0f73b9cff17160f42ce3c7093ce71c28"} Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.533362 4941 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0bb1431404e08e2fc19686ec80e63c9472be48851a9ac8f298458764325c84cb"} Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.533432 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.534146 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.534197 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.534210 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.535779 4941 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7" exitCode=0 Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.535866 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7"} Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.535898 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"34e9bedb04b03a25caff1da82447e19da9c2e52c474961fb9396103914d400f2"} Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.535981 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.537689 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.537718 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.537732 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.537761 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529"} Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.537791 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b09c6f0605cde3b880a0ab43d39aa51a0e0ab66249881501412d51ec5ab58177"} Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.539753 4941 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27" exitCode=0 Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.539791 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27"} Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.539821 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b98906a4c2999f8c77989dc5a2baaaa000b0ad581c549cd90ae7187f4d3f1b08"} Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.539927 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.541310 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.541365 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.541376 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.543462 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.544347 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.544369 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:20 crc kubenswrapper[4941]: I1130 06:46:20.544381 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:20 crc kubenswrapper[4941]: E1130 06:46:20.860132 4941 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" interval="1.6s" Nov 30 06:46:21 crc kubenswrapper[4941]: W1130 06:46:21.019957 4941 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.107:6443: connect: connection refused Nov 30 06:46:21 crc kubenswrapper[4941]: E1130 06:46:21.020074 4941 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.107:6443: connect: connection refused" logger="UnhandledError" Nov 30 06:46:21 crc kubenswrapper[4941]: W1130 06:46:21.030279 4941 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.107:6443: connect: connection refused Nov 30 06:46:21 crc kubenswrapper[4941]: E1130 06:46:21.030387 4941 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 
38.102.83.107:6443: connect: connection refused" logger="UnhandledError" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.089394 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.091562 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.091621 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.091635 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.091668 4941 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 30 06:46:21 crc kubenswrapper[4941]: E1130 06:46:21.092247 4941 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.107:6443: connect: connection refused" node="crc" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.452854 4941 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.107:6443: connect: connection refused Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.546655 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.546701 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.546712 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.546842 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.547725 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.547775 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.547791 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.549226 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.549253 4941 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.549267 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.549372 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.550439 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.550465 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.550475 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.553485 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.553532 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.553546 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.553556 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.553566 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.553953 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.558174 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.558210 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.558222 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:21 crc 
kubenswrapper[4941]: I1130 06:46:21.559626 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"1096a8eff1b601167d5c16fea534c438fe1fc135e61418626766c8d434b9afe6"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.559713 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.560376 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.560404 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.560415 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.561362 4941 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7d9fe38dceb78e63deb66ed43997788485dadfbbf2c7bd0a0348ef09fae9df27" exitCode=0 Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.561405 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7d9fe38dceb78e63deb66ed43997788485dadfbbf2c7bd0a0348ef09fae9df27"} Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.561534 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.562286 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.562306 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:21 crc kubenswrapper[4941]: I1130 06:46:21.562316 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.506425 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.567632 4941 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b30d92b344231d159b55db9d12277a21a05c707da992a233fdef76c57dd389db" exitCode=0 Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.567717 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b30d92b344231d159b55db9d12277a21a05c707da992a233fdef76c57dd389db"} Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.567844 4941 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.567902 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.567898 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.567902 4941 kubelet_node_status.go:401] "Setting node annotation to enable 
volume controller attach/detach" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.569672 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.569734 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.569764 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.569817 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.569853 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.569872 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.570524 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.570591 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.570616 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.692816 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.695093 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.695166 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.695186 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:22 crc kubenswrapper[4941]: I1130 06:46:22.695236 4941 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 30 06:46:23 crc kubenswrapper[4941]: I1130 06:46:23.574930 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"28b79c8384dd4ff0369ffb7fefab6c2d8fc6711fdf3e75b5c6204c1d9d8aee6d"} Nov 30 06:46:23 crc kubenswrapper[4941]: I1130 06:46:23.575009 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"afbf53b573dcc7467bbdf678490cc227abc5dd89765e40b8896b7b99bc2b4091"} Nov 30 06:46:23 crc kubenswrapper[4941]: I1130 06:46:23.575038 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"389f098363a2ac6cd13000848fbec8fcc9f8124ec060b6f6875c570aebaaeab5"} Nov 30 06:46:24 crc kubenswrapper[4941]: I1130 06:46:24.583556 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"43293ea156b7c394ed916e9bb2407bf3b84ebc887636b56561283d751b682230"} Nov 30 06:46:24 crc kubenswrapper[4941]: I1130 06:46:24.583721 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2c98b8812b61b751baa2e0b37c485c3e8e1ccd9314228a4b456d82d9726d2ab1"} Nov 30 06:46:24 crc kubenswrapper[4941]: I1130 06:46:24.583676 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:24 crc kubenswrapper[4941]: I1130 06:46:24.584985 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:24 crc kubenswrapper[4941]: I1130 06:46:24.585045 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:24 crc kubenswrapper[4941]: I1130 06:46:24.585067 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:24 crc kubenswrapper[4941]: I1130 06:46:24.847118 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Nov 30 06:46:25 crc kubenswrapper[4941]: I1130 06:46:25.586870 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:25 crc kubenswrapper[4941]: I1130 06:46:25.588144 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:25 crc kubenswrapper[4941]: I1130 06:46:25.588200 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:25 crc kubenswrapper[4941]: I1130 06:46:25.588218 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:26 crc kubenswrapper[4941]: I1130 06:46:26.148118 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:26 crc kubenswrapper[4941]: I1130 06:46:26.148291 4941 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 30 06:46:26 crc kubenswrapper[4941]: I1130 06:46:26.148359 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:26 crc kubenswrapper[4941]: I1130 06:46:26.149982 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:26 crc kubenswrapper[4941]: I1130 06:46:26.150016 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:26 crc kubenswrapper[4941]: I1130 06:46:26.150029 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:26 crc kubenswrapper[4941]: I1130 06:46:26.590217 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:26 crc kubenswrapper[4941]: I1130 06:46:26.591376 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:26 crc kubenswrapper[4941]: I1130 06:46:26.591459 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:26 crc kubenswrapper[4941]: I1130 06:46:26.591481 4941 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:27 crc kubenswrapper[4941]: I1130 06:46:27.414744 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Nov 30 06:46:27 crc kubenswrapper[4941]: I1130 06:46:27.592233 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:27 crc kubenswrapper[4941]: I1130 06:46:27.593058 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:27 crc kubenswrapper[4941]: I1130 06:46:27.593077 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:27 crc kubenswrapper[4941]: I1130 06:46:27.593085 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:28 crc kubenswrapper[4941]: I1130 06:46:28.215424 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:28 crc kubenswrapper[4941]: I1130 06:46:28.215729 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:28 crc kubenswrapper[4941]: I1130 06:46:28.217542 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:28 crc kubenswrapper[4941]: I1130 06:46:28.217586 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:28 crc kubenswrapper[4941]: I1130 06:46:28.217602 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:28 crc kubenswrapper[4941]: I1130 06:46:28.225329 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:28 crc kubenswrapper[4941]: I1130 06:46:28.596031 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:28 crc kubenswrapper[4941]: I1130 06:46:28.597715 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:28 crc kubenswrapper[4941]: I1130 06:46:28.597772 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:28 crc kubenswrapper[4941]: I1130 06:46:28.597789 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.039103 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.039409 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.041052 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.041088 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.041107 4941 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.089685 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.090023 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.091998 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.092024 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.092038 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.148661 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.166467 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:29 crc kubenswrapper[4941]: E1130 06:46:29.588404 4941 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.598762 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.600143 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.600198 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:29 crc kubenswrapper[4941]: I1130 06:46:29.600216 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:30 crc kubenswrapper[4941]: I1130 06:46:30.456674 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:30 crc kubenswrapper[4941]: I1130 06:46:30.601377 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:30 crc kubenswrapper[4941]: I1130 06:46:30.602677 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:30 crc kubenswrapper[4941]: I1130 06:46:30.602773 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:30 crc kubenswrapper[4941]: I1130 06:46:30.602798 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:30 crc kubenswrapper[4941]: I1130 06:46:30.611019 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:31 crc kubenswrapper[4941]: I1130 06:46:31.603678 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:31 crc kubenswrapper[4941]: I1130 06:46:31.605091 4941 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:31 crc kubenswrapper[4941]: I1130 06:46:31.605130 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:31 crc kubenswrapper[4941]: I1130 06:46:31.605144 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:32 crc kubenswrapper[4941]: I1130 06:46:32.454558 4941 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Nov 30 06:46:32 crc kubenswrapper[4941]: E1130 06:46:32.462651 4941 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Nov 30 06:46:32 crc kubenswrapper[4941]: I1130 06:46:32.507143 4941 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded" start-of-body= Nov 30 06:46:32 crc kubenswrapper[4941]: I1130 06:46:32.507547 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded" Nov 30 06:46:32 crc kubenswrapper[4941]: I1130 06:46:32.606679 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:32 crc kubenswrapper[4941]: I1130 06:46:32.607990 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:32 crc kubenswrapper[4941]: I1130 06:46:32.608438 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:32 crc kubenswrapper[4941]: I1130 06:46:32.608455 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:32 crc kubenswrapper[4941]: I1130 06:46:32.631079 4941 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Nov 30 06:46:32 crc kubenswrapper[4941]: I1130 06:46:32.631165 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Nov 30 06:46:33 crc kubenswrapper[4941]: I1130 06:46:33.479456 4941 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while 
awaiting headers)" start-of-body= Nov 30 06:46:33 crc kubenswrapper[4941]: I1130 06:46:33.479562 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.447745 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.448004 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.449390 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.449447 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.449458 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.467353 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.514906 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.515199 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.516956 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.517074 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.517103 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.521709 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.621145 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.621219 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.622669 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.622715 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.622771 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.622919 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.622959 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.622978 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.627985 4941 trace.go:236] Trace[652914381]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Nov-2025 06:46:23.171) (total time: 14456ms): Nov 30 06:46:37 crc kubenswrapper[4941]: Trace[652914381]: ---"Objects listed" error: 14456ms (06:46:37.627) Nov 30 06:46:37 crc kubenswrapper[4941]: Trace[652914381]: [14.456246647s] [14.456246647s] END Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.628023 4941 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.628066 4941 trace.go:236] Trace[1152861143]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Nov-2025 06:46:23.001) (total time: 14626ms): Nov 30 06:46:37 crc kubenswrapper[4941]: Trace[1152861143]: ---"Objects listed" error: 14626ms (06:46:37.627) Nov 30 06:46:37 crc kubenswrapper[4941]: Trace[1152861143]: [14.626694568s] [14.626694568s] END Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.628096 4941 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.631651 4941 trace.go:236] Trace[303768712]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Nov-2025 06:46:23.080) (total time: 14551ms): Nov 30 06:46:37 crc kubenswrapper[4941]: Trace[303768712]: ---"Objects listed" error: 14551ms (06:46:37.631) Nov 30 06:46:37 crc kubenswrapper[4941]: Trace[303768712]: [14.551209529s] [14.551209529s] END Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.631706 4941 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.631937 4941 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Nov 30 06:46:37 crc kubenswrapper[4941]: E1130 06:46:37.632304 4941 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.632734 4941 trace.go:236] Trace[2027381420]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Nov-2025 06:46:23.947) (total time: 13685ms): Nov 30 06:46:37 crc kubenswrapper[4941]: Trace[2027381420]: ---"Objects listed" error: 13684ms (06:46:37.632) Nov 30 06:46:37 crc kubenswrapper[4941]: Trace[2027381420]: [13.68504689s] [13.68504689s] END Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.632773 4941 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.670665 4941 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:60858->192.168.126.11:17697: read: connection reset by peer" start-of-body= Nov 30 06:46:37 crc 
kubenswrapper[4941]: I1130 06:46:37.670755 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:60858->192.168.126.11:17697: read: connection reset by peer" Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.671264 4941 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Nov 30 06:46:37 crc kubenswrapper[4941]: I1130 06:46:37.671401 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.456866 4941 apiserver.go:52] "Watching apiserver" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.462237 4941 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.462600 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.463046 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.463099 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.463244 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.463065 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.463361 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.463340 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.463732 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.463776 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.463832 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.467026 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.467702 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.467796 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.467796 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.467837 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.467997 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.468613 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.469151 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.473312 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.509469 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.525112 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.539948 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.553764 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.557127 4941 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.564613 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.580925 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.592717 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.606562 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.627300 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.629432 4941 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c" exitCode=255
Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.629492 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c"}
Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.638361 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.638466 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.638509 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.638602 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639255 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh".
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639426 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639446 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639497 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639528 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639555 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639579 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639577 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639604 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639629 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639653 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639685 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639720 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639774 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639804 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639830 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639858 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639884 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: 
\"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639911 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639936 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639960 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.639985 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640009 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640038 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640080 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640102 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640142 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640165 4941 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640188 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640213 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640211 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640240 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640263 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640270 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640295 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640340 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640398 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640423 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640423 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640447 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640472 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640497 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640519 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640524 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640567 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640593 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640621 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640648 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640636 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640670 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640664 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640698 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640720 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640742 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640763 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640785 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640806 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640830 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640852 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640873 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640896 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod 
\"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640917 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640939 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640961 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640983 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641005 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641027 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641051 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641077 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641100 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641160 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641185 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641209 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641233 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641255 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641288 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641312 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641350 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641370 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641395 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641418 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: 
\"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641441 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641471 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641494 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641516 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641540 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641563 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641583 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641633 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641657 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641676 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641698 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641723 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641744 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641769 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641792 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641815 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641840 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641863 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641886 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 30 
06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641909 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641930 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641950 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641969 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641988 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642013 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642067 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642096 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642119 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642141 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 30 06:46:38 crc 
kubenswrapper[4941]: I1130 06:46:38.642165 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642187 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642208 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642233 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642318 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642360 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642380 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642398 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642419 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642439 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 30 06:46:38 
crc kubenswrapper[4941]: I1130 06:46:38.642460 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642485 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642509 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642531 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642552 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642576 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642598 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642623 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642645 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642665 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 
06:46:38.642684 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642701 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642716 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642734 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642750 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642766 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642782 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642800 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642907 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642925 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 30 
06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642940 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642957 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642976 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642993 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643010 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643390 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643416 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643558 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643585 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643609 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " 
Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643808 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643829 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643882 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644130 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644186 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644203 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644228 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644358 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644436 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644459 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644570 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644620 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644643 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644692 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644718 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644742 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644766 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644842 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644867 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645056 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645162 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645189 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645211 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645235 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645259 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645283 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645360 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645426 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645569 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645664 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645687 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645724 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645748 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645769 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645793 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645815 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645868 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645892 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645913 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645929 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" 
(UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645973 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645991 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646009 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646117 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646144 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646168 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646197 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646223 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646247 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646272 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646406 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646452 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646480 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646512 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646543 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646570 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646599 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646626 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:38 crc 
kubenswrapper[4941]: I1130 06:46:38.646653 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646713 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646742 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646770 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646797 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646820 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646899 4941 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646916 4941 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646930 4941 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646943 4941 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: 
\"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646957 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646970 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646983 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646996 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.647011 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.640781 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641013 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641092 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641095 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641175 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641456 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641741 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.641999 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642084 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642236 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.642785 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643233 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643275 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643476 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643469 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.643879 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644045 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644113 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.650538 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644362 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644551 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644801 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644875 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.645223 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646529 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.646908 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.647497 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.647912 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.648417 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.648465 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.648525 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.647817 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.648584 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.648604 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.648982 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.649207 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.649499 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.649715 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.650256 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.650404 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.644290 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.651854 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.652263 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.652262 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.652579 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.652886 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.653213 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.653366 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.653685 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.653924 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.654099 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.656148 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.656239 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.656243 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.656268 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.656394 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.656591 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.656944 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.657090 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.657150 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.657675 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.657925 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.658002 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.658050 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.658227 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.658313 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.658340 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.658377 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.658720 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.658722 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.658747 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.658853 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.659016 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.659227 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.659286 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.659361 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.659606 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.659776 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.660066 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.660183 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.660684 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.660745 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.660961 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.661318 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.661156 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.661447 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.661440 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.661515 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.661546 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.661981 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.662015 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.662195 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.662243 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.662350 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.662926 4941 scope.go:117] "RemoveContainer" containerID="a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.663496 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.663366 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.660118 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.664188 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.665155 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.665155 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.665288 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.665625 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.665667 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.665677 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.665999 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.666036 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.666094 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.666098 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.666642 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.666844 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.665944 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.667293 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.667300 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.667400 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.668742 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.668955 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.669296 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.669683 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.669731 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.667598 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.670737 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.671240 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.671311 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.671516 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.673308 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.673588 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.673675 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.673962 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.673997 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.674115 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.674404 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.674454 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.674491 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.674894 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.675164 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.675720 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.676093 4941 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.676275 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:39.176231862 +0000 UTC m=+19.944403501 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.676471 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.677585 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.677630 4941 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.677816 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.677981 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:39.177944485 +0000 UTC m=+19.946116274 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.678214 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.678321 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:46:39.178302557 +0000 UTC m=+19.946474176 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.666613 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.678636 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.678761 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.678882 4941 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.678942 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.694044 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.694087 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.694107 4941 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.694194 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:39.194166007 +0000 UTC m=+19.962337806 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.698580 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.698605 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.698620 4941 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:38 crc kubenswrapper[4941]: E1130 06:46:38.698698 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:39.198678229 +0000 UTC m=+19.966849858 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.700077 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.702669 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.703057 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.704264 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.704617 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.704628 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.704933 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.706047 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.706061 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.706918 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.710063 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.713892 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.714169 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.714305 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.714765 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.715177 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.715292 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.716138 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.716388 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.716640 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.717691 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.718425 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.718967 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.719123 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.720019 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.720833 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.721018 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.721636 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.722252 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.722287 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.722132 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.723540 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.724382 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.724426 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.727604 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.727806 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.728107 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.728190 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.728296 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.728511 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.728778 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.729882 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.730113 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.730246 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.730460 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.730697 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.730713 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.730870 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.732789 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.736465 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.745556 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.749700 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.749779 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.749843 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.749913 4941 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.749926 4941 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.749953 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.749980 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.749989 4941 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750043 4941 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750070 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750080 4941 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750089 4941 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750099 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750136 4941 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750146 4941 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750156 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750166 4941 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750177 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750186 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750206 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750233 4941 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750243 4941 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750252 4941 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750260 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750270 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750278 4941 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750286 4941 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750295 4941 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750303 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750311 4941 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750345 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750358 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750367 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750377 4941 reconciler_common.go:293] "Volume detached for volume 
\"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750386 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750395 4941 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750405 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750416 4941 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750425 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750436 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750488 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750498 4941 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750506 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750512 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750531 4941 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750598 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Nov 30 
06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750615 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750628 4941 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750643 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750658 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750672 4941 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750717 4941 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750731 4941 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750743 4941 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750758 4941 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750770 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750782 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750794 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750808 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc 
kubenswrapper[4941]: I1130 06:46:38.750823 4941 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750837 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750849 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750862 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750875 4941 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750888 4941 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750901 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750913 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750928 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750940 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750956 4941 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750969 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750983 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: 
\"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.750999 4941 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751017 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751033 4941 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751049 4941 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751065 4941 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751057 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751082 4941 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751097 4941 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751110 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751123 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751136 4941 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751158 4941 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751171 4941 reconciler_common.go:293] "Volume 
detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751186 4941 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751197 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751209 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751222 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751235 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751247 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751259 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751274 4941 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751285 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751298 4941 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751310 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.751322 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752200 4941 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752213 4941 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752225 4941 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752238 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752251 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752266 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752278 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752290 4941 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752303 4941 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752315 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752346 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752362 4941 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752375 4941 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752387 4941 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on 
node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752399 4941 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752411 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752423 4941 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752435 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752449 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752462 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752473 4941 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752488 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752501 4941 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752515 4941 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752527 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752540 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752551 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath 
\"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752563 4941 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752575 4941 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752587 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752599 4941 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752611 4941 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752623 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752635 4941 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752647 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752658 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752671 4941 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752685 4941 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752697 4941 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752709 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc 
kubenswrapper[4941]: I1130 06:46:38.752721 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752733 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752745 4941 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752756 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752768 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752780 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752795 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752807 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752819 4941 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752832 4941 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752844 4941 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752857 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752869 4941 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Nov 30 
06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752881 4941 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752893 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752907 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752919 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752931 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752944 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752955 4941 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752966 4941 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752978 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.752990 4941 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753002 4941 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753013 4941 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753024 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753036 4941 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753048 4941 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753059 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753075 4941 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753095 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753108 4941 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753120 4941 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753132 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753146 4941 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753157 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753169 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753182 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753194 4941 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753206 4941 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753218 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753230 4941 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753242 4941 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753255 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753267 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753280 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753292 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753305 4941 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753317 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.753353 4941 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.756834 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.758022 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.759057 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.762943 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.787241 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.801229 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 30 06:46:38 crc kubenswrapper[4941]: W1130 06:46:38.803907 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-7f35fdc6810f5f0700b0ef671d5eb7341a9984f5bbffad9d714cea03e2596c45 WatchSource:0}: Error finding container 7f35fdc6810f5f0700b0ef671d5eb7341a9984f5bbffad9d714cea03e2596c45: Status 404 returned error can't find the container with id 7f35fdc6810f5f0700b0ef671d5eb7341a9984f5bbffad9d714cea03e2596c45 Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.810035 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 30 06:46:38 crc kubenswrapper[4941]: W1130 06:46:38.829694 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-00cc1cc4e34c4f8c7e82bf5bd3bc8a1829ae960f5f57d5f6b64fba82de39f5b5 WatchSource:0}: Error finding container 00cc1cc4e34c4f8c7e82bf5bd3bc8a1829ae960f5f57d5f6b64fba82de39f5b5: Status 404 returned error can't find the container with id 00cc1cc4e34c4f8c7e82bf5bd3bc8a1829ae960f5f57d5f6b64fba82de39f5b5 Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.853944 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.854370 4941 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:38 crc kubenswrapper[4941]: I1130 06:46:38.854392 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.258517 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.258604 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.258641 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod 
\"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.258668 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.258695 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.258780 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:46:40.258748032 +0000 UTC m=+21.026919661 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.258805 4941 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.258845 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.258866 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.258878 4941 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.258921 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:40.258885936 +0000 UTC m=+21.027057585 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.258930 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.258947 4941 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.258953 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.258956 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:40.258938878 +0000 UTC m=+21.027110607 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.258969 4941 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.258984 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:40.258971549 +0000 UTC m=+21.027143188 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.259033 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:40.2590214 +0000 UTC m=+21.027193049 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.521054 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:39 crc kubenswrapper[4941]: E1130 06:46:39.521236 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.524359 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.524856 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.525665 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.526265 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.526812 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.527454 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.528061 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.528596 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.529160 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.531086 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.531611 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.532654 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.533138 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.534080 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.534731 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.535363 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.536472 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.536840 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.537856 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.538486 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.538905 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.539054 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.539876 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.540300 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.541571 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.542017 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.543024 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.543657 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.544103 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.544996 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.545457 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.546260 4941 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.546460 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.548014 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.548889 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.549291 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.551230 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.552285 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.553472 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.554370 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.555217 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.556406 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.557018 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.557875 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.558548 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.559135 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.560595 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.561125 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Nov 30 
06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.562091 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.562851 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.563709 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.564194 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.564992 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.565508 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.566087 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.567441 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.582555 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.599022 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30
T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.643258 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"00cc1cc4e34c4f8c7e82bf5bd3bc8a1829ae960f5f57d5f6b64fba82de39f5b5"} Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.644796 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18"} Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.644844 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d"} Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 
06:46:39.644857 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"fd076d70407ae1e0f8789ab9bbf8f259d49ea2eea4771571e19403894366e0de"} Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.646414 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92"} Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.646446 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"7f35fdc6810f5f0700b0ef671d5eb7341a9984f5bbffad9d714cea03e2596c45"} Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.647715 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.648106 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.649585 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c"} Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.650026 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.671976 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.687243 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.701722 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.716983 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.732141 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.749765 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.765942 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.784218 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:39 crc kubenswrapper[4941]: I1130 06:46:39.809946 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.266170 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.266253 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.266285 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.266309 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.266366 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266460 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266465 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 
06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266486 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266491 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266498 4941 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266504 4941 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266529 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:46:42.266503005 +0000 UTC m=+23.034674614 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266599 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:42.266590398 +0000 UTC m=+23.034762007 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266612 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:42.266606329 +0000 UTC m=+23.034777938 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266529 4941 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266450 4941 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266836 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:42.266784394 +0000 UTC m=+23.034956043 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.266878 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:42.266861617 +0000 UTC m=+23.035033256 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.462553 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.467896 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.474237 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.482065 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.496645 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.510901 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.521607 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.521617 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.521771 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.521862 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.523682 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.544388 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.556279 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.571715 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.588445 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"ce
rt-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.605844 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.620714 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.633037 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.644999 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.658884 4941 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.659388 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.674078 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.690499 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.833007 4941 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.835009 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.835066 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.835077 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.835145 4941 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.847865 4941 kubelet_node_status.go:115] "Node was previously registered" node="crc" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.848199 4941 kubelet_node_status.go:79] "Successfully registered node" node="crc" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.849538 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.849609 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.849622 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.849640 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.849652 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:40Z","lastTransitionTime":"2025-11-30T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.870696 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.875748 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.875821 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.875839 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.875873 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.875890 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:40Z","lastTransitionTime":"2025-11-30T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.893226 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.897841 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.897915 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.897925 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.897944 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.897957 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:40Z","lastTransitionTime":"2025-11-30T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.909576 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.913049 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.913096 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.913108 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.913128 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.913142 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:40Z","lastTransitionTime":"2025-11-30T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.924300 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.928059 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.928107 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.928120 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.928138 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.928149 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:40Z","lastTransitionTime":"2025-11-30T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.941598 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:40Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:40 crc kubenswrapper[4941]: E1130 06:46:40.941849 4941 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.944201 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.944264 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.944279 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.944318 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:40 crc kubenswrapper[4941]: I1130 06:46:40.944348 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:40Z","lastTransitionTime":"2025-11-30T06:46:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.047581 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.047639 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.047654 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.047675 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.047693 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:41Z","lastTransitionTime":"2025-11-30T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.149809 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.149854 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.149866 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.149908 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.149923 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:41Z","lastTransitionTime":"2025-11-30T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.255699 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.255757 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.255770 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.255789 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.255806 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:41Z","lastTransitionTime":"2025-11-30T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.296665 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-sm9jf"] Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.297081 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-sm9jf" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.297856 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-vv76k"] Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.298032 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-vv76k" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.299357 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.299610 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.299741 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.300788 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.300886 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.301403 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.301458 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.314280 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.332488 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.346839 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.358576 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.358628 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.358639 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.358659 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.358671 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:41Z","lastTransitionTime":"2025-11-30T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.378117 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.405776 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.422897 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.443567 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.455244 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.461011 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.461057 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.461069 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.461092 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.461103 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:41Z","lastTransitionTime":"2025-11-30T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.467341 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.477235 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/b02b1490-bb42-429b-8d83-592b38482a87-hosts-file\") pod \"node-resolver-vv76k\" (UID: \"b02b1490-bb42-429b-8d83-592b38482a87\") " pod="openshift-dns/node-resolver-vv76k" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.477434 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/096a351c-31d5-4186-8833-cf6693f30cc7-host\") pod \"node-ca-sm9jf\" (UID: \"096a351c-31d5-4186-8833-cf6693f30cc7\") " pod="openshift-image-registry/node-ca-sm9jf" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 
06:46:41.477480 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2snb\" (UniqueName: \"kubernetes.io/projected/b02b1490-bb42-429b-8d83-592b38482a87-kube-api-access-b2snb\") pod \"node-resolver-vv76k\" (UID: \"b02b1490-bb42-429b-8d83-592b38482a87\") " pod="openshift-dns/node-resolver-vv76k" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.477502 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/096a351c-31d5-4186-8833-cf6693f30cc7-serviceca\") pod \"node-ca-sm9jf\" (UID: \"096a351c-31d5-4186-8833-cf6693f30cc7\") " pod="openshift-image-registry/node-ca-sm9jf" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.477571 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8mw6\" (UniqueName: \"kubernetes.io/projected/096a351c-31d5-4186-8833-cf6693f30cc7-kube-api-access-d8mw6\") pod \"node-ca-sm9jf\" (UID: \"096a351c-31d5-4186-8833-cf6693f30cc7\") " pod="openshift-image-registry/node-ca-sm9jf" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.479452 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.491202 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.500843 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.514495 4941 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.520859 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:41 crc kubenswrapper[4941]: E1130 06:46:41.520986 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.532912 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.546322 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.558232 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.563997 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.564044 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.564054 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.564071 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.564081 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:41Z","lastTransitionTime":"2025-11-30T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.574137 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.578807 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/096a351c-31d5-4186-8833-cf6693f30cc7-serviceca\") pod \"node-ca-sm9jf\" (UID: \"096a351c-31d5-4186-8833-cf6693f30cc7\") " pod="openshift-image-registry/node-ca-sm9jf" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.578847 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8mw6\" (UniqueName: \"kubernetes.io/projected/096a351c-31d5-4186-8833-cf6693f30cc7-kube-api-access-d8mw6\") pod \"node-ca-sm9jf\" (UID: \"096a351c-31d5-4186-8833-cf6693f30cc7\") " pod="openshift-image-registry/node-ca-sm9jf" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.578879 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/b02b1490-bb42-429b-8d83-592b38482a87-hosts-file\") pod \"node-resolver-vv76k\" (UID: \"b02b1490-bb42-429b-8d83-592b38482a87\") " pod="openshift-dns/node-resolver-vv76k" Nov 30 06:46:41 
crc kubenswrapper[4941]: I1130 06:46:41.578923 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/096a351c-31d5-4186-8833-cf6693f30cc7-host\") pod \"node-ca-sm9jf\" (UID: \"096a351c-31d5-4186-8833-cf6693f30cc7\") " pod="openshift-image-registry/node-ca-sm9jf" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.578944 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2snb\" (UniqueName: \"kubernetes.io/projected/b02b1490-bb42-429b-8d83-592b38482a87-kube-api-access-b2snb\") pod \"node-resolver-vv76k\" (UID: \"b02b1490-bb42-429b-8d83-592b38482a87\") " pod="openshift-dns/node-resolver-vv76k" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.579076 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/b02b1490-bb42-429b-8d83-592b38482a87-hosts-file\") pod \"node-resolver-vv76k\" (UID: \"b02b1490-bb42-429b-8d83-592b38482a87\") " pod="openshift-dns/node-resolver-vv76k" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.579153 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/096a351c-31d5-4186-8833-cf6693f30cc7-host\") pod \"node-ca-sm9jf\" (UID: \"096a351c-31d5-4186-8833-cf6693f30cc7\") " pod="openshift-image-registry/node-ca-sm9jf" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.580996 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/096a351c-31d5-4186-8833-cf6693f30cc7-serviceca\") pod \"node-ca-sm9jf\" (UID: \"096a351c-31d5-4186-8833-cf6693f30cc7\") " pod="openshift-image-registry/node-ca-sm9jf" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.589971 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.598931 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2snb\" (UniqueName: \"kubernetes.io/projected/b02b1490-bb42-429b-8d83-592b38482a87-kube-api-access-b2snb\") pod \"node-resolver-vv76k\" (UID: \"b02b1490-bb42-429b-8d83-592b38482a87\") " pod="openshift-dns/node-resolver-vv76k" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.602501 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8mw6\" (UniqueName: \"kubernetes.io/projected/096a351c-31d5-4186-8833-cf6693f30cc7-kube-api-access-d8mw6\") pod \"node-ca-sm9jf\" (UID: \"096a351c-31d5-4186-8833-cf6693f30cc7\") " pod="openshift-image-registry/node-ca-sm9jf" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.603579 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.610840 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-sm9jf" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.615922 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-vv76k" Nov 30 06:46:41 crc kubenswrapper[4941]: W1130 06:46:41.628730 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb02b1490_bb42_429b_8d83_592b38482a87.slice/crio-baf3b5730e11263d720f06d7c1926e977ebd7ce54e067a8de9c7eadc22be9a27 WatchSource:0}: Error finding container baf3b5730e11263d720f06d7c1926e977ebd7ce54e067a8de9c7eadc22be9a27: Status 404 returned error can't find the container with id baf3b5730e11263d720f06d7c1926e977ebd7ce54e067a8de9c7eadc22be9a27 Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.655797 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-vv76k" event={"ID":"b02b1490-bb42-429b-8d83-592b38482a87","Type":"ContainerStarted","Data":"baf3b5730e11263d720f06d7c1926e977ebd7ce54e067a8de9c7eadc22be9a27"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.657148 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-sm9jf" event={"ID":"096a351c-31d5-4186-8833-cf6693f30cc7","Type":"ContainerStarted","Data":"6ef3bc209e4dfdeaf22e1ad0115ce901bed82cabd31ca98741f2fe08ff17d6ab"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.658969 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.675004 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 
06:46:41.675065 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.675084 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.675111 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.675131 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:41Z","lastTransitionTime":"2025-11-30T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.674992 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.687719 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.693804 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-5pscg"] Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.700120 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.705981 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.706202 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.706264 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.706464 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.706580 4941 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.708895 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.731234 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.746574 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.761952 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.780303 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.780354 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.780364 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.780380 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.780391 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:41Z","lastTransitionTime":"2025-11-30T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.791662 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.812941 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.849009 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd7
91fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.880928 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6d39a3a2-8387-4108-aad6-3bfd59ad0018-rootfs\") pod \"machine-config-daemon-5pscg\" (UID: \"6d39a3a2-8387-4108-aad6-3bfd59ad0018\") " pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.880969 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6d39a3a2-8387-4108-aad6-3bfd59ad0018-mcd-auth-proxy-config\") pod \"machine-config-daemon-5pscg\" (UID: \"6d39a3a2-8387-4108-aad6-3bfd59ad0018\") " pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.881017 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6d39a3a2-8387-4108-aad6-3bfd59ad0018-proxy-tls\") pod \"machine-config-daemon-5pscg\" (UID: \"6d39a3a2-8387-4108-aad6-3bfd59ad0018\") " pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.881034 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8mtc\" (UniqueName: \"kubernetes.io/projected/6d39a3a2-8387-4108-aad6-3bfd59ad0018-kube-api-access-b8mtc\") pod \"machine-config-daemon-5pscg\" (UID: \"6d39a3a2-8387-4108-aad6-3bfd59ad0018\") " pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.882271 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.882298 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.882307 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.882346 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.882357 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:41Z","lastTransitionTime":"2025-11-30T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.887841 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.911622 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.927663 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.939919 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.965703 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.980350 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.981609 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6d39a3a2-8387-4108-aad6-3bfd59ad0018-rootfs\") pod \"machine-config-daemon-5pscg\" (UID: 
\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\") " pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.981646 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6d39a3a2-8387-4108-aad6-3bfd59ad0018-mcd-auth-proxy-config\") pod \"machine-config-daemon-5pscg\" (UID: \"6d39a3a2-8387-4108-aad6-3bfd59ad0018\") " pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.981674 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6d39a3a2-8387-4108-aad6-3bfd59ad0018-proxy-tls\") pod \"machine-config-daemon-5pscg\" (UID: \"6d39a3a2-8387-4108-aad6-3bfd59ad0018\") " pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.981690 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8mtc\" (UniqueName: \"kubernetes.io/projected/6d39a3a2-8387-4108-aad6-3bfd59ad0018-kube-api-access-b8mtc\") pod \"machine-config-daemon-5pscg\" (UID: \"6d39a3a2-8387-4108-aad6-3bfd59ad0018\") " pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.981770 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6d39a3a2-8387-4108-aad6-3bfd59ad0018-rootfs\") pod \"machine-config-daemon-5pscg\" (UID: \"6d39a3a2-8387-4108-aad6-3bfd59ad0018\") " pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.982550 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6d39a3a2-8387-4108-aad6-3bfd59ad0018-mcd-auth-proxy-config\") pod \"machine-config-daemon-5pscg\" (UID: \"6d39a3a2-8387-4108-aad6-3bfd59ad0018\") " pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.984062 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.984085 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.984095 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.984110 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.984124 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:41Z","lastTransitionTime":"2025-11-30T06:46:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.986163 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6d39a3a2-8387-4108-aad6-3bfd59ad0018-proxy-tls\") pod \"machine-config-daemon-5pscg\" (UID: \"6d39a3a2-8387-4108-aad6-3bfd59ad0018\") " pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.997355 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:41Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:41 crc kubenswrapper[4941]: I1130 06:46:41.999922 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8mtc\" (UniqueName: \"kubernetes.io/projected/6d39a3a2-8387-4108-aad6-3bfd59ad0018-kube-api-access-b8mtc\") pod \"machine-config-daemon-5pscg\" (UID: \"6d39a3a2-8387-4108-aad6-3bfd59ad0018\") " pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.013992 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.025188 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:46:42 crc kubenswrapper[4941]: W1130 06:46:42.036666 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d39a3a2_8387_4108_aad6_3bfd59ad0018.slice/crio-2283ee641e07cb1d0929dfd5792fe45b85d5fbf614491f38bd0a054dc0ca9fee WatchSource:0}: Error finding container 2283ee641e07cb1d0929dfd5792fe45b85d5fbf614491f38bd0a054dc0ca9fee: Status 404 returned error can't find the container with id 2283ee641e07cb1d0929dfd5792fe45b85d5fbf614491f38bd0a054dc0ca9fee Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.037572 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.062400 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.082678 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.086953 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.086987 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.086996 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.087012 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.087024 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:42Z","lastTransitionTime":"2025-11-30T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.096633 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-vzc7c"] Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.096884 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-zr2rg"] Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.097436 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.097745 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.101058 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.101263 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.102367 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.102415 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.103660 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.103901 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.104009 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-zntd2"] Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.107279 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.112765 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.112937 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.112758 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.113038 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.112853 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.112878 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.112912 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.113231 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.115007 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.127309 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.141614 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.155412 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-
plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473
a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.168203 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.178733 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.182643 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-multus-socket-dir-parent\") pod 
\"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.182684 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-multus-conf-dir\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.182714 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a2c22971-565b-44b0-9312-737c3931a558-multus-daemon-config\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.182744 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a2c22971-565b-44b0-9312-737c3931a558-cni-binary-copy\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.182768 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1eab361f-8591-4bbd-8dce-a51a1b95af2f-tuning-conf-dir\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.182791 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-os-release\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.182812 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-run-netns\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.182872 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-cnibin\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.182897 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-etc-kubernetes\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.182929 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-run-k8s-cni-cncf-io\") pod \"multus-vzc7c\" (UID: 
\"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.182956 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-hostroot\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.182980 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mftnq\" (UniqueName: \"kubernetes.io/projected/a2c22971-565b-44b0-9312-737c3931a558-kube-api-access-mftnq\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.183003 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-system-cni-dir\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.183033 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-multus-cni-dir\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.183056 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-var-lib-cni-multus\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.183085 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2f8k\" (UniqueName: \"kubernetes.io/projected/1eab361f-8591-4bbd-8dce-a51a1b95af2f-kube-api-access-z2f8k\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.183120 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-var-lib-cni-bin\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.183201 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-var-lib-kubelet\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.183257 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1eab361f-8591-4bbd-8dce-a51a1b95af2f-cnibin\") pod \"multus-additional-cni-plugins-zr2rg\" 
(UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.183282 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1eab361f-8591-4bbd-8dce-a51a1b95af2f-os-release\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.183317 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/1eab361f-8591-4bbd-8dce-a51a1b95af2f-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.183381 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-run-multus-certs\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.183413 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1eab361f-8591-4bbd-8dce-a51a1b95af2f-system-cni-dir\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.183428 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1eab361f-8591-4bbd-8dce-a51a1b95af2f-cni-binary-copy\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.189680 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.189806 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.189866 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.189935 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.189991 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:42Z","lastTransitionTime":"2025-11-30T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.198680 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.209465 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.225163 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.240142 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\
\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' 
detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.256207 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.270166 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284274 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284436 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-etc-openvswitch\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284465 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2f8k\" (UniqueName: \"kubernetes.io/projected/1eab361f-8591-4bbd-8dce-a51a1b95af2f-kube-api-access-z2f8k\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284486 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-slash\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.284534 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:46:46.284495351 +0000 UTC m=+27.052666960 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284616 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284665 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-var-lib-cni-bin\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284694 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-var-lib-kubelet\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284717 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1eab361f-8591-4bbd-8dce-a51a1b95af2f-cnibin\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284738 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1eab361f-8591-4bbd-8dce-a51a1b95af2f-os-release\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284748 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-var-lib-kubelet\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284766 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-run-ovn-kubernetes\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284798 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/1eab361f-8591-4bbd-8dce-a51a1b95af2f-cnibin\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " 
pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284802 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-ovnkube-config\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.284841 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.284860 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.284874 4941 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284903 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/1eab361f-8591-4bbd-8dce-a51a1b95af2f-os-release\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.284919 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:46.284900554 +0000 UTC m=+27.053072163 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284919 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/1eab361f-8591-4bbd-8dce-a51a1b95af2f-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.284985 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-env-overrides\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285020 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285039 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-run-multus-certs\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285058 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-kubelet\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285074 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-systemd\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285097 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-run-multus-certs\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285117 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-var-lib-cni-bin\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " 
pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285140 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.285152 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285175 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/1eab361f-8591-4bbd-8dce-a51a1b95af2f-system-cni-dir\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285205 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1eab361f-8591-4bbd-8dce-a51a1b95af2f-cni-binary-copy\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.285182 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285235 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a6217364-7317-4ee9-957e-9a1764ff0342-ovn-node-metrics-cert\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.285248 4941 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285259 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-openvswitch\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285282 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-log-socket\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285219 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/1eab361f-8591-4bbd-8dce-a51a1b95af2f-system-cni-dir\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.285312 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:46.285292056 +0000 UTC m=+27.053463665 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285343 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-cni-bin\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285366 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-multus-socket-dir-parent\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285382 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-multus-conf-dir\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285397 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a2c22971-565b-44b0-9312-737c3931a558-multus-daemon-config\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285413 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-run-netns\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285434 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a2c22971-565b-44b0-9312-737c3931a558-cni-binary-copy\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285450 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-ovnkube-script-lib\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285455 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-multus-conf-dir\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285468 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1eab361f-8591-4bbd-8dce-a51a1b95af2f-tuning-conf-dir\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285497 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-os-release\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285515 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-run-netns\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285519 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-multus-socket-dir-parent\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285549 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-systemd-units\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285569 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-ovn\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285595 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285615 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-cnibin\") pod \"multus-vzc7c\" 
(UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285633 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-etc-kubernetes\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285651 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-var-lib-openvswitch\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285666 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-node-log\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285684 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-cni-netd\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285702 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285718 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdvkd\" (UniqueName: \"kubernetes.io/projected/a6217364-7317-4ee9-957e-9a1764ff0342-kube-api-access-kdvkd\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285736 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-run-k8s-cni-cncf-io\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285751 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-hostroot\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285766 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mftnq\" (UniqueName: \"kubernetes.io/projected/a2c22971-565b-44b0-9312-737c3931a558-kube-api-access-mftnq\") pod \"multus-vzc7c\" 
(UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285781 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-system-cni-dir\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285795 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-multus-cni-dir\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285813 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-var-lib-cni-multus\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285873 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-var-lib-cni-multus\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285898 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/1eab361f-8591-4bbd-8dce-a51a1b95af2f-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285924 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/1eab361f-8591-4bbd-8dce-a51a1b95af2f-cni-binary-copy\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285970 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-run-netns\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285976 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-os-release\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.285985 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-host-run-k8s-cni-cncf-io\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.286032 4941 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-etc-kubernetes\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.286043 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-system-cni-dir\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.286078 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-cnibin\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.286131 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-hostroot\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.286136 4941 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.286131 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a2c22971-565b-44b0-9312-737c3931a558-multus-cni-dir\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.286198 4941 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.286201 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a2c22971-565b-44b0-9312-737c3931a558-multus-daemon-config\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.286203 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:46.286185214 +0000 UTC m=+27.054356823 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.286243 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-11-30 06:46:46.286234965 +0000 UTC m=+27.054406574 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.286312 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/1eab361f-8591-4bbd-8dce-a51a1b95af2f-tuning-conf-dir\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.286492 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a2c22971-565b-44b0-9312-737c3931a558-cni-binary-copy\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.292665 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.292702 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.292718 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.292737 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.292749 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:42Z","lastTransitionTime":"2025-11-30T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.304805 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.305248 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2f8k\" (UniqueName: \"kubernetes.io/projected/1eab361f-8591-4bbd-8dce-a51a1b95af2f-kube-api-access-z2f8k\") pod \"multus-additional-cni-plugins-zr2rg\" (UID: \"1eab361f-8591-4bbd-8dce-a51a1b95af2f\") " pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.313561 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mftnq\" (UniqueName: \"kubernetes.io/projected/a2c22971-565b-44b0-9312-737c3931a558-kube-api-access-mftnq\") pod \"multus-vzc7c\" (UID: \"a2c22971-565b-44b0-9312-737c3931a558\") " pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.328976 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.347485 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387169 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-slash\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387236 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-run-ovn-kubernetes\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387259 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-ovnkube-config\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387284 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-env-overrides\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387301 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-slash\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387340 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-kubelet\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387363 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-systemd\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387385 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-kubelet\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387389 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a6217364-7317-4ee9-957e-9a1764ff0342-ovn-node-metrics-cert\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387404 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-run-ovn-kubernetes\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387443 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-log-socket\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387427 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-log-socket\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387591 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-cni-bin\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387627 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-openvswitch\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387489 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-systemd\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387654 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-run-netns\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387667 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-openvswitch\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387629 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-cni-bin\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387675 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-ovnkube-script-lib\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387694 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-run-netns\") pod \"ovnkube-node-zntd2\" (UID: 
\"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387774 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-systemd-units\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387794 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-ovn\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387817 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-node-log\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387836 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-cni-netd\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387855 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-var-lib-openvswitch\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387854 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-systemd-units\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387889 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-node-log\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387898 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387877 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387867 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-ovn\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387930 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-var-lib-openvswitch\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387942 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-cni-netd\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.387977 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdvkd\" (UniqueName: \"kubernetes.io/projected/a6217364-7317-4ee9-957e-9a1764ff0342-kube-api-access-kdvkd\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.388011 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-env-overrides\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.388151 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-etc-openvswitch\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.388214 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-etc-openvswitch\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.388302 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-ovnkube-script-lib\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.388591 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-ovnkube-config\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.390477 4941 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a6217364-7317-4ee9-957e-9a1764ff0342-ovn-node-metrics-cert\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.394802 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.394828 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.394838 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.394854 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.394862 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:42Z","lastTransitionTime":"2025-11-30T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.410947 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdvkd\" (UniqueName: \"kubernetes.io/projected/a6217364-7317-4ee9-957e-9a1764ff0342-kube-api-access-kdvkd\") pod \"ovnkube-node-zntd2\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.415747 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.422792 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-vzc7c" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.429550 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:42 crc kubenswrapper[4941]: W1130 06:46:42.430670 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1eab361f_8591_4bbd_8dce_a51a1b95af2f.slice/crio-1491977c99364eee89aa4a32120ee20395cc8d4399b368d3aa8592deb1a50b7b WatchSource:0}: Error finding container 1491977c99364eee89aa4a32120ee20395cc8d4399b368d3aa8592deb1a50b7b: Status 404 returned error can't find the container with id 1491977c99364eee89aa4a32120ee20395cc8d4399b368d3aa8592deb1a50b7b Nov 30 06:46:42 crc kubenswrapper[4941]: W1130 06:46:42.434187 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2c22971_565b_44b0_9312_737c3931a558.slice/crio-fb2308e338c8bf923fe3d0f5d3dc5622c6d7571aef95e0572da842e3bc94d3ff WatchSource:0}: Error finding container fb2308e338c8bf923fe3d0f5d3dc5622c6d7571aef95e0572da842e3bc94d3ff: Status 404 returned error can't find the container with id fb2308e338c8bf923fe3d0f5d3dc5622c6d7571aef95e0572da842e3bc94d3ff Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.506425 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.506470 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.506483 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.506505 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.506517 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:42Z","lastTransitionTime":"2025-11-30T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.520709 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.520857 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.521066 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:42 crc kubenswrapper[4941]: E1130 06:46:42.521233 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.611205 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.611616 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.611627 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.611647 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.611660 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:42Z","lastTransitionTime":"2025-11-30T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.661623 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-sm9jf" event={"ID":"096a351c-31d5-4186-8833-cf6693f30cc7","Type":"ContainerStarted","Data":"34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.663019 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vzc7c" event={"ID":"a2c22971-565b-44b0-9312-737c3931a558","Type":"ContainerStarted","Data":"475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.663100 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vzc7c" event={"ID":"a2c22971-565b-44b0-9312-737c3931a558","Type":"ContainerStarted","Data":"fb2308e338c8bf923fe3d0f5d3dc5622c6d7571aef95e0572da842e3bc94d3ff"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.664666 4941 generic.go:334] "Generic (PLEG): container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d" exitCode=0 Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.664767 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.664846 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" 
event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"8afa66c174f64be3cab1d8a845dec34f66aef255f2e5692b9e1e17ff7301f2c2"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.665856 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" event={"ID":"1eab361f-8591-4bbd-8dce-a51a1b95af2f","Type":"ContainerStarted","Data":"1491977c99364eee89aa4a32120ee20395cc8d4399b368d3aa8592deb1a50b7b"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.667803 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.667850 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.667862 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"2283ee641e07cb1d0929dfd5792fe45b85d5fbf614491f38bd0a054dc0ca9fee"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.669709 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-vv76k" event={"ID":"b02b1490-bb42-429b-8d83-592b38482a87","Type":"ContainerStarted","Data":"29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.681950 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.691117 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.703502 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e
54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.714654 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.714694 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.714705 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.714724 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.714734 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:42Z","lastTransitionTime":"2025-11-30T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.715500 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.726166 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.742268 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: 
I1130 06:46:42.758176 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\
\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.773824 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator
@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.787297 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.799707 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.811556 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.816873 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.816896 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.816907 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.816924 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.816934 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:42Z","lastTransitionTime":"2025-11-30T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.822196 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.834022 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.844016 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.860209 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.878470 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.890019 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.901899 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.914887 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.919491 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.919537 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.919549 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.919568 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.919580 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:42Z","lastTransitionTime":"2025-11-30T06:46:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.929301 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.949343 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:42 crc kubenswrapper[4941]: I1130 06:46:42.985638 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:42Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.021855 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.021889 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.021899 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.021915 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.021926 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:43Z","lastTransitionTime":"2025-11-30T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.030433 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.066350 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.106118 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.127857 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.127896 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.127906 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.127922 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.127932 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:43Z","lastTransitionTime":"2025-11-30T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.148884 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.204944 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.231355 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.231627 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.231659 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.231675 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.231700 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.231722 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:43Z","lastTransitionTime":"2025-11-30T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.336077 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.336128 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.336149 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.336173 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.336190 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:43Z","lastTransitionTime":"2025-11-30T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.438897 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.439305 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.439319 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.439363 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.439378 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:43Z","lastTransitionTime":"2025-11-30T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.521110 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:43 crc kubenswrapper[4941]: E1130 06:46:43.521235 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.541561 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.541612 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.541675 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.541702 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.541718 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:43Z","lastTransitionTime":"2025-11-30T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.644056 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.644644 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.644670 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.644694 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.644710 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:43Z","lastTransitionTime":"2025-11-30T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.681624 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.681716 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.681732 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.681746 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.681759 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.683216 4941 generic.go:334] "Generic (PLEG): container finished" podID="1eab361f-8591-4bbd-8dce-a51a1b95af2f" containerID="21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1" exitCode=0 Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.683281 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" event={"ID":"1eab361f-8591-4bbd-8dce-a51a1b95af2f","Type":"ContainerDied","Data":"21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.702435 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.717955 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.735459 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.747274 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.747305 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.747316 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.747346 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.747356 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:43Z","lastTransitionTime":"2025-11-30T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.756569 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.805123 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.832283 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.853934 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.856043 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.856091 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.856105 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.856127 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.856152 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:43Z","lastTransitionTime":"2025-11-30T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.869627 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.881905 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.892725 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.915063 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPa
th\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.927852 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.938426 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.956558 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:43Z 
is after 2025-08-24T17:21:41Z" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.958175 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.958220 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.958230 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.958246 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:43 crc kubenswrapper[4941]: I1130 06:46:43.958257 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:43Z","lastTransitionTime":"2025-11-30T06:46:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.060652 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.060696 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.060707 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.060726 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.060739 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:44Z","lastTransitionTime":"2025-11-30T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.163298 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.163366 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.163380 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.163398 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.163410 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:44Z","lastTransitionTime":"2025-11-30T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.265739 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.265825 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.265847 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.265880 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.265906 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:44Z","lastTransitionTime":"2025-11-30T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.368703 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.368770 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.368788 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.368815 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.368834 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:44Z","lastTransitionTime":"2025-11-30T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.471193 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.471231 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.471243 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.471260 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.471272 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:44Z","lastTransitionTime":"2025-11-30T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
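Each setters.go:603 record above carries the node's Ready condition as inline JSON after condition=, and the object runs to the end of the record, so it can be parsed directly. A minimal sketch for pulling reason and message out of one such line (the line value below is abbreviated from the 06:46:43.958257 record):

import json

# Abbreviated copy of a setters.go:603 record; everything after "condition="
# is a single JSON object.
line = 'I1130 06:46:43.958257 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","reason":"KubeletNotReady","message":"container runtime network not ready"}'
cond = json.loads(line.split("condition=", 1)[1])
print(cond["reason"], "->", cond["message"])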
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.520771 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.520842 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:46:44 crc kubenswrapper[4941]: E1130 06:46:44.521344 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:46:44 crc kubenswrapper[4941]: E1130 06:46:44.521456 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.574377 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.574425 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.574437 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.574455 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.574468 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:44Z","lastTransitionTime":"2025-11-30T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
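The NotReady heartbeats and the two "Error syncing pod" records point at one missing artifact: nothing has written a CNI config to /etc/kubernetes/cni/net.d/ yet (ovnkube-node, whose containers start in the records below, is presumably what eventually provides it). A minimal sketch of the same readiness check, polling the directory named in the message:

import os
import time

# Path copied verbatim from the KubeletNotReady message.
CNI_DIR = "/etc/kubernetes/cni/net.d/"

# The kubelet reports NetworkReady=false until this directory holds a config.
while not (os.path.isdir(CNI_DIR) and os.listdir(CNI_DIR)):
    print("no CNI configuration file in", CNI_DIR)
    time.sleep(1)
print("CNI config present:", sorted(os.listdir(CNI_DIR)))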
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.677079 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.677136 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.677173 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.677193 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.677211 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:44Z","lastTransitionTime":"2025-11-30T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.687628 4941 generic.go:334] "Generic (PLEG): container finished" podID="1eab361f-8591-4bbd-8dce-a51a1b95af2f" containerID="c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8" exitCode=0
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.687735 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" event={"ID":"1eab361f-8591-4bbd-8dce-a51a1b95af2f","Type":"ContainerDied","Data":"c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8"}
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.694530 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a"}
Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.709543 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/r
un/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.738434 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.752847 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.771621 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.781417 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.781460 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.781476 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.781492 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.781500 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:44Z","lastTransitionTime":"2025-11-30T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.788428 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.819006 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.832706 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.846025 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.861457 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.872966 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.885211 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.885264 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.885276 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.885295 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.885311 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:44Z","lastTransitionTime":"2025-11-30T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.892173 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.907648 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\
":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.922223 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.938209 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:44Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.988506 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.988553 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.988564 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.988582 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:44 crc kubenswrapper[4941]: I1130 06:46:44.988598 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:44Z","lastTransitionTime":"2025-11-30T06:46:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.090830 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.090875 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.090887 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.090903 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.090914 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:45Z","lastTransitionTime":"2025-11-30T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.193367 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.193408 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.193434 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.193453 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.193475 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:45Z","lastTransitionTime":"2025-11-30T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.297545 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.297586 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.297595 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.297613 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.297624 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:45Z","lastTransitionTime":"2025-11-30T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.400652 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.400709 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.400722 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.400744 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.400763 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:45Z","lastTransitionTime":"2025-11-30T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.503827 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.503883 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.503898 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.503920 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.503935 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:45Z","lastTransitionTime":"2025-11-30T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.521238 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:45 crc kubenswrapper[4941]: E1130 06:46:45.521404 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.606417 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.606462 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.606472 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.606492 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.606505 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:45Z","lastTransitionTime":"2025-11-30T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.701943 4941 generic.go:334] "Generic (PLEG): container finished" podID="1eab361f-8591-4bbd-8dce-a51a1b95af2f" containerID="94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e" exitCode=0 Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.702016 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" event={"ID":"1eab361f-8591-4bbd-8dce-a51a1b95af2f","Type":"ContainerDied","Data":"94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e"} Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.708310 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.708410 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.708435 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.708462 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.708484 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:45Z","lastTransitionTime":"2025-11-30T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.715933 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.731435 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.748865 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.767240 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.782319 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.807446 4941 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.816072 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.816121 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.816140 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.816164 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.816183 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:45Z","lastTransitionTime":"2025-11-30T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.822820 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.837235 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.850906 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.905720 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.917970 
4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.918005 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.918015 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.918032 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.918042 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:45Z","lastTransitionTime":"2025-11-30T06:46:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.920154 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.941475 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.955815 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:45 crc kubenswrapper[4941]: I1130 06:46:45.971124 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:45Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.021646 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.021699 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.021712 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.021732 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.021749 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:46Z","lastTransitionTime":"2025-11-30T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.123943 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.123983 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.123994 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.124009 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.124018 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:46Z","lastTransitionTime":"2025-11-30T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.226820 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.226866 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.226875 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.226892 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.226905 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:46Z","lastTransitionTime":"2025-11-30T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.328652 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.328869 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.328954 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-30 06:46:54.328903446 +0000 UTC m=+35.097075095 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.329017 4941 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.329069 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.329106 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:54.329080552 +0000 UTC m=+35.097252201 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.329191 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.329370 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.329447 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.329456 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.329483 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:46:46 crc 
kubenswrapper[4941]: E1130 06:46:46.329496 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.329501 4941 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.329520 4941 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.329543 4941 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.329575 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:54.329558877 +0000 UTC m=+35.097730526 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.329625 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:54.329602099 +0000 UTC m=+35.097773708 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.329643 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:54.32963644 +0000 UTC m=+35.097808049 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.330409 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.330442 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.330452 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.330466 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.330482 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:46Z","lastTransitionTime":"2025-11-30T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.433120 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.433158 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.433167 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.433183 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.433193 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:46Z","lastTransitionTime":"2025-11-30T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.521054 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.521108 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.521228 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:46:46 crc kubenswrapper[4941]: E1130 06:46:46.521376 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.541681 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.541747 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.541759 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.541805 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.541818 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:46Z","lastTransitionTime":"2025-11-30T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.644961 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.645022 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.645044 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.645068 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.645086 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:46Z","lastTransitionTime":"2025-11-30T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.709812 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b"} Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.715783 4941 generic.go:334] "Generic (PLEG): container finished" podID="1eab361f-8591-4bbd-8dce-a51a1b95af2f" containerID="6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976" exitCode=0 Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.715825 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" event={"ID":"1eab361f-8591-4bbd-8dce-a51a1b95af2f","Type":"ContainerDied","Data":"6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976"} Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.741667 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\
\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.758000 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.758055 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.758070 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.758094 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.758110 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:46Z","lastTransitionTime":"2025-11-30T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.762983 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.786249 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/o
penshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/
var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.803261 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.820495 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.835981 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.848210 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.862979 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.863001 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.863010 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.863024 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.863033 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:46Z","lastTransitionTime":"2025-11-30T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.863887 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.883015 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\
":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.900246 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.913792 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.935486 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.950182 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.962371 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:46Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.965988 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.966037 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.966052 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.966073 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:46 crc kubenswrapper[4941]: I1130 06:46:46.966087 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:46Z","lastTransitionTime":"2025-11-30T06:46:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.069767 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.069819 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.069836 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.069864 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.069880 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:47Z","lastTransitionTime":"2025-11-30T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.176075 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.176135 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.176153 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.176177 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.176194 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:47Z","lastTransitionTime":"2025-11-30T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.280183 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.280382 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.280535 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.280626 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.280689 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:47Z","lastTransitionTime":"2025-11-30T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.384683 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.385193 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.385377 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.385571 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.385728 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:47Z","lastTransitionTime":"2025-11-30T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.488920 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.488974 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.488991 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.489017 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.489034 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:47Z","lastTransitionTime":"2025-11-30T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.521500 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:47 crc kubenswrapper[4941]: E1130 06:46:47.522148 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.592091 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.592166 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.592191 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.592222 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.592244 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:47Z","lastTransitionTime":"2025-11-30T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.696361 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.696665 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.696746 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.696818 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.696883 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:47Z","lastTransitionTime":"2025-11-30T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.731367 4941 generic.go:334] "Generic (PLEG): container finished" podID="1eab361f-8591-4bbd-8dce-a51a1b95af2f" containerID="28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7" exitCode=0 Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.731433 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" event={"ID":"1eab361f-8591-4bbd-8dce-a51a1b95af2f","Type":"ContainerDied","Data":"28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7"} Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.752912 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:47Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.782288 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:47Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.801245 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.801309 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.801352 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.801379 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.801397 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:47Z","lastTransitionTime":"2025-11-30T06:46:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.804298 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:47Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.825161 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:47Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.841767 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:47Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.874584 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:47Z 
is after 2025-08-24T17:21:41Z" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.892206 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:47Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.905236 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.905293 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.905308 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.905356 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.905371 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:47Z","lastTransitionTime":"2025-11-30T06:46:47Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.915291 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:47Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.933649 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:47Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.954129 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:47Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.971662 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:47Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:47 crc kubenswrapper[4941]: I1130 06:46:47.990989 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:47Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.010014 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.010061 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.010074 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.010092 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.010106 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:48Z","lastTransitionTime":"2025-11-30T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.011457 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.032440 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.112977 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.113036 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.113053 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.113080 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.113100 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:48Z","lastTransitionTime":"2025-11-30T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.217032    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.217106    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.217125    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.217157    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.217177    4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:48Z","lastTransitionTime":"2025-11-30T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.321529    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.321591    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.321609    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.321634    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.321651    4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:48Z","lastTransitionTime":"2025-11-30T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.425486    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.425562    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.425582    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.425613    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.425633    4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:48Z","lastTransitionTime":"2025-11-30T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.521568    4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.521666    4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:46:48 crc kubenswrapper[4941]: E1130 06:46:48.521795    4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:46:48 crc kubenswrapper[4941]: E1130 06:46:48.521958    4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.531109    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.531141    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.531152    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.531169    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.531179    4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:48Z","lastTransitionTime":"2025-11-30T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.633247    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.633293    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.633302    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.633347    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.633364    4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:48Z","lastTransitionTime":"2025-11-30T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.741986    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.742051    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.742069    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.742096    4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.742118    4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:48Z","lastTransitionTime":"2025-11-30T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.748022    4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d"}
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.748366    4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.748385    4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2"
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.754889    4941 generic.go:334] "Generic (PLEG): container finished" podID="1eab361f-8591-4bbd-8dce-a51a1b95af2f" containerID="3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a" exitCode=0
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.754965    4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" event={"ID":"1eab361f-8591-4bbd-8dce-a51a1b95af2f","Type":"ContainerDied","Data":"3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a"}
Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.768952    4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent
\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.791996 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.810759 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.827903 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.829055 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.830424 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.845161 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.845232 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.845252 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.845320 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.845367 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:48Z","lastTransitionTime":"2025-11-30T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.849753 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.866803 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.887938 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.901004 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.917958 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.931135 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.948876 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.949040 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.949069 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.949105 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.949131 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:48Z","lastTransitionTime":"2025-11-30T06:46:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.952143 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.976118 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:48 crc kubenswrapper[4941]: I1130 06:46:48.990662 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:48Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.019722 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e555
63a7f4ccfe9ac14a7dae586d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.038953 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.043016 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.052161 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.052198 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.052209 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.052224 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.052241 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:49Z","lastTransitionTime":"2025-11-30T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.055784 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.082549 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath
\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.096172 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.108075 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.123108 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.140594 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.154785 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.154834 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.154850 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.154873 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.154891 4941 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:49Z","lastTransitionTime":"2025-11-30T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.162047 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.178520 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.192703 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.207155 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.219556 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.230581 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.257567 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.257625 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.257636 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.257652 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.257662 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:49Z","lastTransitionTime":"2025-11-30T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.318928 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"ku
be-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\
\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is 
after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.336867 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri
-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"m
ountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.350244 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.359519 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.359551 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.359558 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.359572 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.359584 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:49Z","lastTransitionTime":"2025-11-30T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.367628 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.382688 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.393359 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.406761 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.423958 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.438554 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.463522 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.463562 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.463573 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.463589 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.463599 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:49Z","lastTransitionTime":"2025-11-30T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.469406 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.491654 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.512629 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.526854 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:49 crc kubenswrapper[4941]: E1130 06:46:49.527039 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.541123 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.554503 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.565727 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.565781 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.565794 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.565817 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.565832 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:49Z","lastTransitionTime":"2025-11-30T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.569589 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.583540 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.604249 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.625600 4941 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.644590 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.656865 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.669258 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.669296 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.669306 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.669340 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.669353 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:49Z","lastTransitionTime":"2025-11-30T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.670126 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.679170 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.690847 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.703643 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.714910 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.731169 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.746111 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.762187 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" event={"ID":"1eab361f-8591-4bbd-8dce-a51a1b95af2f","Type":"ContainerStarted","Data":"c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e"} Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.762203 4941 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.764812 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.771647 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.771680 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.771691 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.771705 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.771715 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:49Z","lastTransitionTime":"2025-11-30T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.778274 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.795758 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.808396 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.837612 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e555
63a7f4ccfe9ac14a7dae586d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.858583 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.873098 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.874457 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.874548 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.874572 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.874597 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.874611 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:49Z","lastTransitionTime":"2025-11-30T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.892260 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.915513 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.933935 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.948828 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.963990 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z"
Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.977173 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.977209 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.977223 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.977242 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.977258 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:49Z","lastTransitionTime":"2025-11-30T06:46:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.979893 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:49 crc kubenswrapper[4941]: I1130 06:46:49.994552 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.005021 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:50Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.022394 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMoun
ts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sh
a256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:50Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.080160 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.080200 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.080210 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.080229 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.080240 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:50Z","lastTransitionTime":"2025-11-30T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.183715 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.183801 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.183824 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.183858 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.183880 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:50Z","lastTransitionTime":"2025-11-30T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.287311 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.287421 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.287448 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.287478 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.287498 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:50Z","lastTransitionTime":"2025-11-30T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.391151 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.391272 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.391292 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.391320 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.391371 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:50Z","lastTransitionTime":"2025-11-30T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.494609 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.494695 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.494715 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.494752 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.494779 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:50Z","lastTransitionTime":"2025-11-30T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.521154 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.521186 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:50 crc kubenswrapper[4941]: E1130 06:46:50.521433 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:46:50 crc kubenswrapper[4941]: E1130 06:46:50.521705 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.597607 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.597682 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.597702 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.597732 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.597753 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:50Z","lastTransitionTime":"2025-11-30T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.700353 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.700409 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.700426 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.700448 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.700460 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:50Z","lastTransitionTime":"2025-11-30T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.765895 4941 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.803237 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.803299 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.803308 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.803368 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.803393 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:50Z","lastTransitionTime":"2025-11-30T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.906564 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.906614 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.906626 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.906646 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:50 crc kubenswrapper[4941]: I1130 06:46:50.906661 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:50Z","lastTransitionTime":"2025-11-30T06:46:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.010303 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.010389 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.010407 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.010430 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.010446 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.113506 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.113571 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.113708 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.113741 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.113800 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.189940 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.190005 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.190017 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.190035 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.190047 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:51 crc kubenswrapper[4941]: E1130 06:46:51.206740 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 
2025-08-24T17:21:41Z" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.211404 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.211460 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.211476 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.211500 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.211516 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.232144 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.232212 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.232234 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.232260 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.232275 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.254048 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.254176 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.254193 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.254215 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.254235 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.278587 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.278660 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.278673 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.278699 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.278715 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:51 crc kubenswrapper[4941]: E1130 06:46:51.295142 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 
2025-08-24T17:21:41Z"
Nov 30 06:46:51 crc kubenswrapper[4941]: E1130 06:46:51.295304 4941 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.297927 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.297975 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.297985 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.298004 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.298018 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.400657 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.400729 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.400752 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.400784 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.400806 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.498499 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.503952 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.504006 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.504023 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.504050 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.504069 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.521185 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:46:51 crc kubenswrapper[4941]: E1130 06:46:51.521470 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.607455 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.607493 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.607503 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.607522 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.607533 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.710554 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.710629 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.710647 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.710675 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.710695 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.771901 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/0.log"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.775176 4941 generic.go:334] "Generic (PLEG): container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d" exitCode=1
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.775240 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d"}
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.776060 4941 scope.go:117] "RemoveContainer" containerID="5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.792675 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.807082 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 2025-08-24T17:21:41Z"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.812948 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.813019 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.813037 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.813067 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.813085 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.821750 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.837417 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.850022 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.870761 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.894239 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.913563 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.915402 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.915428 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.915438 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.915454 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.915468 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:51Z","lastTransitionTime":"2025-11-30T06:46:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.931057 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.962289 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:51 crc kubenswrapper[4941]: I1130 06:46:51.981872 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:51.998674 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:51Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.012685 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.018313 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.018367 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.018383 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.018401 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.018414 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:52Z","lastTransitionTime":"2025-11-30T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.037305 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:51.285724 6257 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1130 06:46:51.285749 6257 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1130 06:46:51.285769 6257 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:51.285791 6257 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:51.285818 6257 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:51.285835 6257 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1130 06:46:51.285840 6257 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1130 06:46:51.285875 6257 factory.go:656] Stopping watch factory\\\\nI1130 06:46:51.285893 6257 ovnkube.go:599] Stopped ovnkube\\\\nI1130 06:46:51.285929 6257 handler.go:208] Removed *v1.Node event handler 2\\\\nI1130 06:46:51.285941 6257 handler.go:208] Removed *v1.Node event handler 7\\\\nI1130 06:46:51.285949 6257 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1130 06:46:51.285956 6257 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:51.285962 6257 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:51.285969 6257 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1130 
06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.120650 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.120690 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.120703 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.120721 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.120735 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:52Z","lastTransitionTime":"2025-11-30T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.223526 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.223583 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.223601 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.223624 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.223643 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:52Z","lastTransitionTime":"2025-11-30T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.326095 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.326128 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.326138 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.326155 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.326165 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:52Z","lastTransitionTime":"2025-11-30T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.428570 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.428606 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.428614 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.428628 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.428640 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:52Z","lastTransitionTime":"2025-11-30T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.521452 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.521556 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:52 crc kubenswrapper[4941]: E1130 06:46:52.521651 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:46:52 crc kubenswrapper[4941]: E1130 06:46:52.521881 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.532523 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.532574 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.532587 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.532605 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.532693 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:52Z","lastTransitionTime":"2025-11-30T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.635740 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.635799 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.635811 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.635850 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.635864 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:52Z","lastTransitionTime":"2025-11-30T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.738501 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.738542 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.738549 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.738569 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.738579 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:52Z","lastTransitionTime":"2025-11-30T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.780610 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/0.log" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.783343 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5"} Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.783766 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.797250 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba1
6552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.809226 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.834441 4941 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:51.285724 6257 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1130 06:46:51.285749 6257 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1130 06:46:51.285769 6257 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:51.285791 6257 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:51.285818 6257 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:51.285835 6257 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1130 06:46:51.285840 6257 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1130 06:46:51.285875 6257 factory.go:656] Stopping watch factory\\\\nI1130 06:46:51.285893 6257 ovnkube.go:599] Stopped ovnkube\\\\nI1130 06:46:51.285929 6257 handler.go:208] Removed *v1.Node event handler 2\\\\nI1130 06:46:51.285941 6257 handler.go:208] Removed *v1.Node event handler 7\\\\nI1130 06:46:51.285949 6257 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1130 06:46:51.285956 6257 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:51.285962 6257 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:51.285969 6257 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1130 
06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.840576 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.840638 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.840658 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.840688 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.840707 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:52Z","lastTransitionTime":"2025-11-30T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.852380 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.870905 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.886203 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.897436 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z"
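Every "Failed to update status for pod" entry in this stretch fails the same way: the kubelet's Post to the network-node-identity webhook is rejected because the webhook's serving certificate expired on 2025-08-24T17:21:41Z, long before the node's current clock of 2025-11-30. A minimal standalone sketch of inspecting that certificate's validity window from the node, assuming the webhook at 127.0.0.1:9743 named in these entries is still listening; the probe deliberately skips chain verification so the handshake completes even with an expired certificate (editor's illustration in Go, not part of the captured log):

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Address of the network-node-identity webhook, taken from the log above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		// Skip verification: we want the handshake to complete even with
		// an expired certificate so we can read its validity window.
		InsecureSkipVerify: true,
	})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Println("notBefore:", cert.NotBefore.Format(time.RFC3339))
	fmt.Println("notAfter: ", cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		// This is the condition crypto/tls reports in the entries above.
		fmt.Println("certificate has expired")
	}
}

Comparing notAfter against the node clock distinguishes a genuinely expired serving certificate from a skewed clock on the node.

Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.910036 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z"
Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.929519 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 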
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.943085 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.943190 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.943217 4941 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.943261 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.943283 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:52Z","lastTransitionTime":"2025-11-30T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
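The setters.go entries above embed the node's Ready condition as inline JSON. A minimal standalone sketch of decoding one of those payloads (editor's illustration in Go; the struct mirrors the field names visible in the log, not the full Kubernetes NodeCondition type):

package main

import (
	"encoding/json"
	"fmt"
)

// nodeCondition covers the fields present in the setters.go payload above.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Condition JSON copied verbatim from the log entry above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:52Z","lastTransitionTime":"2025-11-30T06:46:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`

	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("%s=%s since %s: %s\n", c.Type, c.Status, c.LastTransitionTime, c.Reason)
}

Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.945508 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z"
Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.964276 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 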
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.981075 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:52 crc kubenswrapper[4941]: I1130 06:46:52.994708 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:52Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.012657 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:53Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.046299 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.046418 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:53 crc 
kubenswrapper[4941]: I1130 06:46:53.046444 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.046476 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.046496 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:53Z","lastTransitionTime":"2025-11-30T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.149375 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.149435 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.149455 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.149482 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.149499 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:53Z","lastTransitionTime":"2025-11-30T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.253494 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.253602 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.253627 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.253669 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.253694 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:53Z","lastTransitionTime":"2025-11-30T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.356570 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.356644 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.356657 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.356695 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.356715 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:53Z","lastTransitionTime":"2025-11-30T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.459672 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.459738 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.459757 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.459783 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.459804 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:53Z","lastTransitionTime":"2025-11-30T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.521680 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:46:53 crc kubenswrapper[4941]: E1130 06:46:53.521876 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.561995 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.562039 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.562052 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.562072 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.562085 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:53Z","lastTransitionTime":"2025-11-30T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.664785 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.664826 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.664836 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.664852 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.664860 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:53Z","lastTransitionTime":"2025-11-30T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.767448 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.767561 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.767574 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.767596 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.767610 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:53Z","lastTransitionTime":"2025-11-30T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.870519 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.870568 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.870581 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.870601 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.870614 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:53Z","lastTransitionTime":"2025-11-30T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.973512 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.973557 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.973569 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.973590 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:53 crc kubenswrapper[4941]: I1130 06:46:53.973602 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:53Z","lastTransitionTime":"2025-11-30T06:46:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.077414 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.077461 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.077474 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.077493 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.077506 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:54Z","lastTransitionTime":"2025-11-30T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.180869 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.180954 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.180974 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.181002 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.181019 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:54Z","lastTransitionTime":"2025-11-30T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.254039 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq"]
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.254561 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.257162 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.257400 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.284362 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.284438 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.284457 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.284486 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.284503 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:54Z","lastTransitionTime":"2025-11-30T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.288259 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97c
c05d7c5ae25443905067faf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:51.285724 6257 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1130 06:46:51.285749 6257 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1130 06:46:51.285769 6257 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:51.285791 6257 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:51.285818 6257 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:51.285835 6257 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1130 06:46:51.285840 6257 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1130 06:46:51.285875 6257 factory.go:656] Stopping watch factory\\\\nI1130 06:46:51.285893 6257 ovnkube.go:599] Stopped ovnkube\\\\nI1130 06:46:51.285929 6257 handler.go:208] Removed *v1.Node event handler 2\\\\nI1130 06:46:51.285941 6257 handler.go:208] Removed *v1.Node event handler 7\\\\nI1130 06:46:51.285949 6257 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1130 06:46:51.285956 6257 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:51.285962 6257 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:51.285969 6257 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1130 
06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.304064 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.321507 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.338755 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.353791 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.368507 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.383279 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.386904 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.386940 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.386950 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.386970 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.386983 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:54Z","lastTransitionTime":"2025-11-30T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.400529 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"o
vnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.411037 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.416101 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.416218 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8q2z\" (UniqueName: \"kubernetes.io/projected/e42ab3a0-995b-4132-af38-66b45838b5b2-kube-api-access-z8q2z\") pod \"ovnkube-control-plane-749d76644c-5x2fq\" (UID: \"e42ab3a0-995b-4132-af38-66b45838b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416254 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:47:10.416230491 +0000 UTC m=+51.184402100 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.416279 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e42ab3a0-995b-4132-af38-66b45838b5b2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-5x2fq\" (UID: \"e42ab3a0-995b-4132-af38-66b45838b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.416350 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.416380 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e42ab3a0-995b-4132-af38-66b45838b5b2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-5x2fq\" (UID: \"e42ab3a0-995b-4132-af38-66b45838b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.416402 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.416422 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e42ab3a0-995b-4132-af38-66b45838b5b2-ovn-control-plane-metrics-cert\") pod 
\"ovnkube-control-plane-749d76644c-5x2fq\" (UID: \"e42ab3a0-995b-4132-af38-66b45838b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.416445 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.416465 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416516 4941 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416576 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416576 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416591 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416599 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416602 4941 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416611 4941 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416638 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:47:10.416620112 +0000 UTC m=+51.184791811 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416662 4941 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416666 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-30 06:47:10.416654483 +0000 UTC m=+51.184826112 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416684 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-30 06:47:10.416676714 +0000 UTC m=+51.184848333 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.416697 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:47:10.416690914 +0000 UTC m=+51.184862533 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.425504 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPa
th\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.438270 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.451422 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.467042 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: 
I1130 06:46:54.478855 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.488881 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/s
erviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.489805 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.489884 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.489910 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.489942 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.489970 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:54Z","lastTransitionTime":"2025-11-30T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.517243 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8q2z\" (UniqueName: \"kubernetes.io/projected/e42ab3a0-995b-4132-af38-66b45838b5b2-kube-api-access-z8q2z\") pod \"ovnkube-control-plane-749d76644c-5x2fq\" (UID: \"e42ab3a0-995b-4132-af38-66b45838b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.517285 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e42ab3a0-995b-4132-af38-66b45838b5b2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-5x2fq\" (UID: \"e42ab3a0-995b-4132-af38-66b45838b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.517342 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e42ab3a0-995b-4132-af38-66b45838b5b2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-5x2fq\" (UID: \"e42ab3a0-995b-4132-af38-66b45838b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.517379 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e42ab3a0-995b-4132-af38-66b45838b5b2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-5x2fq\" (UID: \"e42ab3a0-995b-4132-af38-66b45838b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.517826 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e42ab3a0-995b-4132-af38-66b45838b5b2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-5x2fq\" (UID: \"e42ab3a0-995b-4132-af38-66b45838b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.518521 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e42ab3a0-995b-4132-af38-66b45838b5b2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-5x2fq\" (UID: \"e42ab3a0-995b-4132-af38-66b45838b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.521647 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.521828 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.521982 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.522211 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.524074 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e42ab3a0-995b-4132-af38-66b45838b5b2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-5x2fq\" (UID: \"e42ab3a0-995b-4132-af38-66b45838b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.534563 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8q2z\" (UniqueName: \"kubernetes.io/projected/e42ab3a0-995b-4132-af38-66b45838b5b2-kube-api-access-z8q2z\") pod \"ovnkube-control-plane-749d76644c-5x2fq\" (UID: \"e42ab3a0-995b-4132-af38-66b45838b5b2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.570835 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" Nov 30 06:46:54 crc kubenswrapper[4941]: W1130 06:46:54.583601 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode42ab3a0_995b_4132_af38_66b45838b5b2.slice/crio-164fbc8ef85de5b57053d6d4318ee1e9cd3e703d17563bc03217ddd352eba8d9 WatchSource:0}: Error finding container 164fbc8ef85de5b57053d6d4318ee1e9cd3e703d17563bc03217ddd352eba8d9: Status 404 returned error can't find the container with id 164fbc8ef85de5b57053d6d4318ee1e9cd3e703d17563bc03217ddd352eba8d9 Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.591859 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.591896 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.591909 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.591925 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.591938 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:54Z","lastTransitionTime":"2025-11-30T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.694320 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.694455 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.694485 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.694522 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.694546 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:54Z","lastTransitionTime":"2025-11-30T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.791634 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" event={"ID":"e42ab3a0-995b-4132-af38-66b45838b5b2","Type":"ContainerStarted","Data":"164fbc8ef85de5b57053d6d4318ee1e9cd3e703d17563bc03217ddd352eba8d9"} Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.793855 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/1.log" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.794468 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/0.log" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.796128 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.796163 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.796175 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.796196 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.796208 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:54Z","lastTransitionTime":"2025-11-30T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.797654 4941 generic.go:334] "Generic (PLEG): container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5" exitCode=1 Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.797695 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5"} Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.797760 4941 scope.go:117] "RemoveContainer" containerID="5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.798623 4941 scope.go:117] "RemoveContainer" containerID="a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5" Nov 30 06:46:54 crc kubenswrapper[4941]: E1130 06:46:54.798886 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.813702 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.827208 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.839755 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.852892 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.863463 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.879617 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.895389 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.900059 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.900103 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.900115 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.900135 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.900171 4941 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:54Z","lastTransitionTime":"2025-11-30T06:46:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.907624 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.917720 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.933449 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.954178 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.972629 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:54 crc kubenswrapper[4941]: I1130 06:46:54.990718 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:54Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.002731 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.002807 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.002821 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.002844 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.002868 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:55Z","lastTransitionTime":"2025-11-30T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.015418 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:51.285724 6257 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1130 06:46:51.285749 6257 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1130 06:46:51.285769 6257 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:51.285791 6257 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:51.285818 6257 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:51.285835 6257 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1130 06:46:51.285840 6257 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1130 06:46:51.285875 6257 factory.go:656] Stopping watch factory\\\\nI1130 06:46:51.285893 6257 ovnkube.go:599] Stopped ovnkube\\\\nI1130 06:46:51.285929 6257 handler.go:208] Removed *v1.Node event handler 2\\\\nI1130 06:46:51.285941 6257 handler.go:208] Removed *v1.Node event handler 7\\\\nI1130 06:46:51.285949 6257 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1130 06:46:51.285956 6257 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:51.285962 6257 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:51.285969 6257 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1130 
06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:53Z\\\",\\\"message\\\":\\\"-go/informers/factory.go:160\\\\nI1130 06:46:52.704303 6385 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704365 6385 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1130 06:46:52.704289 6385 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704310 6385 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704374 6385 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.705217 6385 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:52.705241 6385 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:52.705257 6385 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:52.705363 6385 factory.go:656] Stopping watch factory\\\\nI1130 06:46:52.705381 6385 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:52.705390 6385 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:52.705399 6385 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:55Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.029754 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:55Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.105995 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.106067 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.106086 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.106117 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.106139 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:55Z","lastTransitionTime":"2025-11-30T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.208730 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.208783 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.208794 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.208816 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.208827 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:55Z","lastTransitionTime":"2025-11-30T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.311573 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.311636 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.311649 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.311671 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.311686 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:55Z","lastTransitionTime":"2025-11-30T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.414462 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.414532 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.414548 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.414574 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.414589 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:55Z","lastTransitionTime":"2025-11-30T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.517443 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.517504 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.517574 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.517599 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.517612 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:55Z","lastTransitionTime":"2025-11-30T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.520760 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:55 crc kubenswrapper[4941]: E1130 06:46:55.520909 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.620834 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.621313 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.621352 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.621382 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.621399 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:55Z","lastTransitionTime":"2025-11-30T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.725499 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.725563 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.725584 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.725615 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.725639 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:55Z","lastTransitionTime":"2025-11-30T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.804488 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" event={"ID":"e42ab3a0-995b-4132-af38-66b45838b5b2","Type":"ContainerStarted","Data":"448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69"} Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.804569 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" event={"ID":"e42ab3a0-995b-4132-af38-66b45838b5b2","Type":"ContainerStarted","Data":"f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2"} Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.806956 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/1.log" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.826313 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:55Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.828753 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.828826 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.828846 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.828878 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.828917 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:55Z","lastTransitionTime":"2025-11-30T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
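Every one of these patch attempts dies the same way: the TLS handshake with the node-identity webhook rejects a serving certificate whose validity window ended 2025-08-24, well before the node's clock time of 2025-11-30. A sketch of the window check as Go's crypto/x509 performs it during verification (the certificate file path here is hypothetical):

```go
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("webhook-serving-cert.pem") // hypothetical path
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	// Certificate.Verify applies this same window test and returns a
	// CertificateInvalidError with reason Expired, which the TLS layer reports
	// as "x509: certificate has expired or is not yet valid: current time ... is after ...".
	if now := time.Now(); now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	}
}
```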
Has your network provider started?"} Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.847763 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:55Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.873247 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:51.285724 6257 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1130 06:46:51.285749 6257 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1130 06:46:51.285769 6257 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:51.285791 6257 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:51.285818 6257 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:51.285835 6257 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1130 06:46:51.285840 6257 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1130 06:46:51.285875 6257 factory.go:656] Stopping watch factory\\\\nI1130 06:46:51.285893 6257 ovnkube.go:599] Stopped ovnkube\\\\nI1130 06:46:51.285929 6257 handler.go:208] Removed *v1.Node event handler 2\\\\nI1130 06:46:51.285941 6257 handler.go:208] Removed *v1.Node event handler 7\\\\nI1130 06:46:51.285949 6257 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1130 06:46:51.285956 6257 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:51.285962 6257 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:51.285969 6257 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1130 
06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:53Z\\\",\\\"message\\\":\\\"-go/informers/factory.go:160\\\\nI1130 06:46:52.704303 6385 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704365 6385 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1130 06:46:52.704289 6385 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704310 6385 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704374 6385 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.705217 6385 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:52.705241 6385 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:52.705257 6385 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:52.705363 6385 factory.go:656] Stopping watch factory\\\\nI1130 06:46:52.705381 6385 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:52.705390 6385 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:52.705399 6385 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:55Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.893065 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:55Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.915462 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:55Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.940658 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.940714 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.940728 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.940752 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.940767 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:55Z","lastTransitionTime":"2025-11-30T06:46:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
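The networking-console-plugin status just above carries a synthesized lastState.terminated: reason ContainerStatusUnknown, exit code 137, and a message explaining that the container could not be located after the pod was deleted. The exit code appears to follow the usual 128+signal convention (128 + SIGKILL = 137), filled in by the kubelet rather than observed from the runtime. A small decode sketch under that assumption:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Terminated mirrors the lastState.terminated keys quoted above.
type Terminated struct {
	ExitCode int    `json:"exitCode"`
	Reason   string `json:"reason"`
	Message  string `json:"message"`
}

func main() {
	raw := `{"exitCode":137,"reason":"ContainerStatusUnknown","message":"The container could not be located when the pod was deleted. The container used to be Running"}`
	var t Terminated
	if err := json.Unmarshal([]byte(raw), &t); err != nil {
		panic(err)
	}
	fmt.Printf("exit=%d (128+%d) reason=%s\n", t.ExitCode, t.ExitCode-128, t.Reason)
}
```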
Has your network provider started?"} Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.956434 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:55Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:55 crc kubenswrapper[4941]: I1130 06:46:55.985888 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:55Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.005372 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.021254 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.037246 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.042954 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.042983 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.042993 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.043010 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.043023 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:56Z","lastTransitionTime":"2025-11-30T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.052687 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.065480 4941 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.076456 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.091580 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.104412 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.159787 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-vwfsk"] Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.160455 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:46:56 crc kubenswrapper[4941]: E1130 06:46:56.160545 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.162075 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.162148 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.162170 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.162202 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.162225 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:56Z","lastTransitionTime":"2025-11-30T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.176085 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.190135 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4
\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.220054 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:51.285724 6257 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1130 06:46:51.285749 6257 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1130 06:46:51.285769 6257 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:51.285791 6257 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:51.285818 6257 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:51.285835 6257 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1130 06:46:51.285840 6257 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1130 06:46:51.285875 6257 factory.go:656] Stopping watch factory\\\\nI1130 06:46:51.285893 6257 ovnkube.go:599] Stopped ovnkube\\\\nI1130 06:46:51.285929 6257 handler.go:208] Removed *v1.Node event handler 2\\\\nI1130 06:46:51.285941 6257 handler.go:208] Removed *v1.Node event handler 7\\\\nI1130 06:46:51.285949 6257 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1130 06:46:51.285956 6257 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:51.285962 6257 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:51.285969 6257 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1130 06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:53Z\\\",\\\"message\\\":\\\"-go/informers/factory.go:160\\\\nI1130 06:46:52.704303 6385 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 
06:46:52.704365 6385 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1130 06:46:52.704289 6385 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704310 6385 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704374 6385 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.705217 6385 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:52.705241 6385 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:52.705257 6385 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:52.705363 6385 factory.go:656] Stopping watch factory\\\\nI1130 06:46:52.705381 6385 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:52.705390 6385 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:52.705399 6385 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.238475 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 
06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.253056 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.261234 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.261287 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsr5b\" (UniqueName: \"kubernetes.io/projected/ba34d142-c6e9-45bd-93a4-cf8e15558381-kube-api-access-gsr5b\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.265541 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.265590 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:56 crc kubenswrapper[4941]: 
I1130 06:46:56.265602 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.265619 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.265631 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:56Z","lastTransitionTime":"2025-11-30T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.272823 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.294114 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.310431 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.331318 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.348767 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.369925 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.370086 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsr5b\" (UniqueName: \"kubernetes.io/projected/ba34d142-c6e9-45bd-93a4-cf8e15558381-kube-api-access-gsr5b\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.370271 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: E1130 06:46:56.370653 4941 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:46:56 crc kubenswrapper[4941]: E1130 06:46:56.370805 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs podName:ba34d142-c6e9-45bd-93a4-cf8e15558381 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:56.870770095 +0000 UTC m=+37.638941744 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs") pod "network-metrics-daemon-vwfsk" (UID: "ba34d142-c6e9-45bd-93a4-cf8e15558381") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.372631 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.372690 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.372712 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.372743 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.372763 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:56Z","lastTransitionTime":"2025-11-30T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.396139 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.398181 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsr5b\" (UniqueName: \"kubernetes.io/projected/ba34d142-c6e9-45bd-93a4-cf8e15558381-kube-api-access-gsr5b\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.413004 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.424977 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.442130 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.456554 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:56Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.475848 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.475928 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.475948 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.475977 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.475997 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:56Z","lastTransitionTime":"2025-11-30T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.520693 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.520829 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:56 crc kubenswrapper[4941]: E1130 06:46:56.520899 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:46:56 crc kubenswrapper[4941]: E1130 06:46:56.521084 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.579760 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.579819 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.579830 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.579853 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.579886 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:56Z","lastTransitionTime":"2025-11-30T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.684506 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.684570 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.684583 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.684607 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.684621 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:56Z","lastTransitionTime":"2025-11-30T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.788171 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.788243 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.788264 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.788292 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.788312 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:56Z","lastTransitionTime":"2025-11-30T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.877519 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:46:56 crc kubenswrapper[4941]: E1130 06:46:56.877953 4941 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:46:56 crc kubenswrapper[4941]: E1130 06:46:56.878138 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs podName:ba34d142-c6e9-45bd-93a4-cf8e15558381 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:57.878089735 +0000 UTC m=+38.646261484 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs") pod "network-metrics-daemon-vwfsk" (UID: "ba34d142-c6e9-45bd-93a4-cf8e15558381") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.891230 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.891288 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.891310 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.891391 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.891418 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:56Z","lastTransitionTime":"2025-11-30T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.993966 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.994014 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.994027 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.994046 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:56 crc kubenswrapper[4941]: I1130 06:46:56.994060 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:56Z","lastTransitionTime":"2025-11-30T06:46:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
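The "failed calling webhook \"pod.network-node-identity.openshift.io\"" entries above all fail for the same reason: the webhook's serving certificate expired on 2025-08-24T17:21:41Z, long before the node's current clock of 2025-11-30. A minimal Go sketch for confirming this from the node, assuming the 127.0.0.1:9743 endpoint named in the log is reachable; this is illustrative tooling, not part of the cluster:

    // certcheck.go - a minimal sketch, not cluster tooling: dial the webhook
    // endpoint from the log and print the serving certificate's validity
    // window. InsecureSkipVerify is deliberate here: the goal is to inspect
    // the expired certificate, not to trust it.
    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
    )

    func main() {
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatal(err)
        }
        defer conn.Close()
        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("subject=%v\nnotBefore=%v\nnotAfter=%v\n", cert.Subject, cert.NotBefore, cert.NotAfter)
    }

Against this log one would expect notAfter to print as 2025-08-24T17:21:41Z, matching the x509 error text repeated in every failed status patch.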
Has your network provider started?"} Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.097189 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.097254 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.097270 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.097299 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.097318 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:57Z","lastTransitionTime":"2025-11-30T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.200441 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.200513 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.200532 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.200559 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.200579 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:57Z","lastTransitionTime":"2025-11-30T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.303032 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.303108 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.303133 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.303170 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.303194 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:57Z","lastTransitionTime":"2025-11-30T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.406007 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.406046 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.406055 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.406070 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.406079 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:57Z","lastTransitionTime":"2025-11-30T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.508568 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.508649 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.508670 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.508704 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.508724 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:57Z","lastTransitionTime":"2025-11-30T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.521030 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.521078 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:46:57 crc kubenswrapper[4941]: E1130 06:46:57.521278 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:46:57 crc kubenswrapper[4941]: E1130 06:46:57.521515 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.612196 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.612267 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.612289 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.612310 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.612337 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:57Z","lastTransitionTime":"2025-11-30T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.715663 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.715844 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.715866 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.715889 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.715906 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:57Z","lastTransitionTime":"2025-11-30T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.818537 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.818601 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.818618 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.818644 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.818660 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:57Z","lastTransitionTime":"2025-11-30T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.888854 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:46:57 crc kubenswrapper[4941]: E1130 06:46:57.889061 4941 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:46:57 crc kubenswrapper[4941]: E1130 06:46:57.889203 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs podName:ba34d142-c6e9-45bd-93a4-cf8e15558381 nodeName:}" failed. No retries permitted until 2025-11-30 06:46:59.889168695 +0000 UTC m=+40.657340344 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs") pod "network-metrics-daemon-vwfsk" (UID: "ba34d142-c6e9-45bd-93a4-cf8e15558381") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.922088 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.922138 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.922156 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.922180 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:57 crc kubenswrapper[4941]: I1130 06:46:57.922199 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:57Z","lastTransitionTime":"2025-11-30T06:46:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.025481 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.025550 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.025590 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.025623 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.025650 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:58Z","lastTransitionTime":"2025-11-30T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
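Note the durationBeforeRetry values in the two nestedpendingoperations entries above: 1s for the first metrics-certs failure, 2s for the second. That is the volume manager's doubling retry delay at work. A minimal Go sketch of the pattern; the initial delay matches the log, while the cap is an assumed illustrative value, not one taken from this log or from kubelet source:

    // backoff.go - a minimal sketch of the doubling retry delay visible in
    // the nestedpendingoperations entries above (1s, then 2s).
    package main

    import (
        "fmt"
        "time"
    )

    const (
        initialDelay = time.Second     // first durationBeforeRetry seen in the log
        maxDelay     = 2 * time.Minute // assumed cap, for illustration only
    )

    // nextDelay returns the delay before the next retry: start at the
    // initial delay, then double on each failure until the cap is reached.
    func nextDelay(current time.Duration) time.Duration {
        if current == 0 {
            return initialDelay
        }
        if next := current * 2; next < maxDelay {
            return next
        }
        return maxDelay
    }

    func main() {
        d := time.Duration(0)
        for i := 0; i < 5; i++ {
            d = nextDelay(d)
            fmt.Println(d) // prints 1s, 2s, 4s, 8s, 16s
        }
    }

This is why the same MountVolume.SetUp failure reappears at widening intervals rather than on every sync loop.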
Has your network provider started?"} Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.129388 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.129435 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.129453 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.129476 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.129494 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:58Z","lastTransitionTime":"2025-11-30T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.233111 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.233209 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.233230 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.233256 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.233277 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:58Z","lastTransitionTime":"2025-11-30T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.336932 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.336981 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.336991 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.337010 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.337021 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:58Z","lastTransitionTime":"2025-11-30T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.440523 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.440615 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.440637 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.440668 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.440693 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:58Z","lastTransitionTime":"2025-11-30T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.521306 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.521417 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:46:58 crc kubenswrapper[4941]: E1130 06:46:58.521653 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:46:58 crc kubenswrapper[4941]: E1130 06:46:58.521897 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.543825 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.543888 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.543907 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.543936 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.543957 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:58Z","lastTransitionTime":"2025-11-30T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.647805 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.647874 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.647894 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.647928 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.647950 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:58Z","lastTransitionTime":"2025-11-30T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.751847 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.751943 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.751968 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.752005 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.752032 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:58Z","lastTransitionTime":"2025-11-30T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.854550 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.854599 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.854614 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.854637 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.854655 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:58Z","lastTransitionTime":"2025-11-30T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.957474 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.957507 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.957515 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.957530 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:58 crc kubenswrapper[4941]: I1130 06:46:58.957539 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:58Z","lastTransitionTime":"2025-11-30T06:46:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.060043 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.060185 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.060210 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.060246 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.060270 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:59Z","lastTransitionTime":"2025-11-30T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.163288 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.163366 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.163382 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.163403 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.163418 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:59Z","lastTransitionTime":"2025-11-30T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.267155 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.267197 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.267232 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.267252 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.267266 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:59Z","lastTransitionTime":"2025-11-30T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.370879 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.371011 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.371032 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.371058 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.371077 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:59Z","lastTransitionTime":"2025-11-30T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.473761 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.473828 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.473843 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.473863 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.473875 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:59Z","lastTransitionTime":"2025-11-30T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.521250 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:46:59 crc kubenswrapper[4941]: E1130 06:46:59.521446 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.521749 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:46:59 crc kubenswrapper[4941]: E1130 06:46:59.521957 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.538399 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"
,\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.559601 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.572776 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.577389 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.577473 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.577488 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.577545 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.577561 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:59Z","lastTransitionTime":"2025-11-30T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.591220 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.606415 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.624599 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.636925 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.652923 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.669498 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.680215 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.680267 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.680292 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.680321 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.680369 4941 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:59Z","lastTransitionTime":"2025-11-30T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.684255 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.700038 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.710386 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.722447 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.733122 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.752810 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97c
c05d7c5ae25443905067faf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a0edf2ac580ad3f7d29406278317ccea337e55563a7f4ccfe9ac14a7dae586d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:51Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:51.285724 6257 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1130 06:46:51.285749 6257 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1130 06:46:51.285769 6257 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:51.285791 6257 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:51.285818 6257 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:51.285835 6257 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1130 06:46:51.285840 6257 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1130 06:46:51.285875 6257 factory.go:656] Stopping watch factory\\\\nI1130 06:46:51.285893 6257 ovnkube.go:599] Stopped ovnkube\\\\nI1130 06:46:51.285929 6257 handler.go:208] Removed *v1.Node event handler 2\\\\nI1130 06:46:51.285941 6257 handler.go:208] Removed *v1.Node event handler 7\\\\nI1130 06:46:51.285949 6257 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1130 06:46:51.285956 6257 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:51.285962 6257 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:51.285969 6257 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1130 06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:53Z\\\",\\\"message\\\":\\\"-go/informers/factory.go:160\\\\nI1130 06:46:52.704303 6385 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704365 6385 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1130 06:46:52.704289 6385 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704310 6385 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704374 6385 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.705217 6385 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:52.705241 6385 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:52.705257 6385 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:52.705363 6385 factory.go:656] Stopping watch factory\\\\nI1130 06:46:52.705381 6385 handler.go:208] Removed *v1.NetworkPolicy event handler 
4\\\\nI1130 06:46:52.705390 6385 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:52.705399 6385 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://
97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.766427 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:46:59Z is after 2025-08-24T17:21:41Z" Nov 30 
06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.783029 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.783079 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.783092 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.783111 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.783124 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:59Z","lastTransitionTime":"2025-11-30T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.885222 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.885281 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.885304 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.885378 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.885409 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:59Z","lastTransitionTime":"2025-11-30T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.911276 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:46:59 crc kubenswrapper[4941]: E1130 06:46:59.911502 4941 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:46:59 crc kubenswrapper[4941]: E1130 06:46:59.911615 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs podName:ba34d142-c6e9-45bd-93a4-cf8e15558381 nodeName:}" failed. No retries permitted until 2025-11-30 06:47:03.911582115 +0000 UTC m=+44.679753764 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs") pod "network-metrics-daemon-vwfsk" (UID: "ba34d142-c6e9-45bd-93a4-cf8e15558381") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.989019 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.989073 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.989090 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.989113 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:46:59 crc kubenswrapper[4941]: I1130 06:46:59.989131 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:46:59Z","lastTransitionTime":"2025-11-30T06:46:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.092052 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.092119 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.092138 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.092166 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.092185 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:00Z","lastTransitionTime":"2025-11-30T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.196085 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.196140 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.196151 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.196174 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.196187 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:00Z","lastTransitionTime":"2025-11-30T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.298993 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.299320 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.299586 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.299750 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.299885 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:00Z","lastTransitionTime":"2025-11-30T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.403564 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.403707 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.403723 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.403748 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.403769 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:00Z","lastTransitionTime":"2025-11-30T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.506885 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.506939 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.506952 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.506973 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.506986 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:00Z","lastTransitionTime":"2025-11-30T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.521444 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.521600 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:00 crc kubenswrapper[4941]: E1130 06:47:00.521757 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:00 crc kubenswrapper[4941]: E1130 06:47:00.521993 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.609602 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.609662 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.609679 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.609706 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.609722 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:00Z","lastTransitionTime":"2025-11-30T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.713426 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.713501 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.713534 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.713567 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.713589 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:00Z","lastTransitionTime":"2025-11-30T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.816987 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.817048 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.817063 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.817083 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.817097 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:00Z","lastTransitionTime":"2025-11-30T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.920379 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.920436 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.920449 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.920471 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:00 crc kubenswrapper[4941]: I1130 06:47:00.920485 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:00Z","lastTransitionTime":"2025-11-30T06:47:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.023132 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.023178 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.023190 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.023217 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.023231 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.125971 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.126067 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.126092 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.126120 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.126140 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.228577 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.228616 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.228625 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.228640 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.228652 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.331088 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.331144 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.331162 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.331186 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.331204 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.434872 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.434942 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.434979 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.435022 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.435049 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.521600 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.521698 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:01 crc kubenswrapper[4941]: E1130 06:47:01.521885 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:01 crc kubenswrapper[4941]: E1130 06:47:01.522091 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.537799 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.537851 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.537869 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.537894 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.537913 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.622787 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.622825 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.622833 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.622849 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.622857 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: E1130 06:47:01.642707 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:01Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.647716 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.647831 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.648280 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.648379 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.648417 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: E1130 06:47:01.663582 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:01Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.667975 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.668072 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.668103 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.668141 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.668166 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: E1130 06:47:01.688898 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:01Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.693853 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.693952 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.693969 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.693993 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.694007 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: E1130 06:47:01.712483 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:01Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.717746 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.717786 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.717799 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.717821 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.717835 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: E1130 06:47:01.730604 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:01Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:01 crc kubenswrapper[4941]: E1130 06:47:01.730768 4941 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.732765 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.732835 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.732892 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.732921 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.732940 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.835181 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.835304 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.835372 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.835409 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.835434 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.939668 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.939736 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.939760 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.939789 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:01 crc kubenswrapper[4941]: I1130 06:47:01.940450 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:01Z","lastTransitionTime":"2025-11-30T06:47:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.044378 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.044434 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.044459 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.044491 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.044515 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:02Z","lastTransitionTime":"2025-11-30T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.147161 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.147214 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.147228 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.147249 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.147265 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:02Z","lastTransitionTime":"2025-11-30T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.250694 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.250780 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.250798 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.250826 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.250843 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:02Z","lastTransitionTime":"2025-11-30T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.355602 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.355671 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.355688 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.355720 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.355737 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:02Z","lastTransitionTime":"2025-11-30T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.458920 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.459309 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.459471 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.459605 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.459720 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:02Z","lastTransitionTime":"2025-11-30T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.520970 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:02 crc kubenswrapper[4941]: E1130 06:47:02.521147 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.520989 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:02 crc kubenswrapper[4941]: E1130 06:47:02.521656 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.563047 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.563112 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.563130 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.563166 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.563184 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:02Z","lastTransitionTime":"2025-11-30T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.667010 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.667090 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.667116 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.667166 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.667189 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:02Z","lastTransitionTime":"2025-11-30T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.770923 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.771003 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.771026 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.771058 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.771077 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:02Z","lastTransitionTime":"2025-11-30T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.875012 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.875089 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.875108 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.875138 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.875156 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:02Z","lastTransitionTime":"2025-11-30T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.978618 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.978676 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.978693 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.978715 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:02 crc kubenswrapper[4941]: I1130 06:47:02.978733 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:02Z","lastTransitionTime":"2025-11-30T06:47:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.082610 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.082697 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.082721 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.082753 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.082778 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:03Z","lastTransitionTime":"2025-11-30T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.185951 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.186016 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.186039 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.186070 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.186092 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:03Z","lastTransitionTime":"2025-11-30T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.289478 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.289600 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.289624 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.289655 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.289680 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:03Z","lastTransitionTime":"2025-11-30T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.392881 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.392935 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.392947 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.392965 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.392979 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:03Z","lastTransitionTime":"2025-11-30T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.496395 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.496440 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.496452 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.496474 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.496489 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:03Z","lastTransitionTime":"2025-11-30T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.521067 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.521068 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:03 crc kubenswrapper[4941]: E1130 06:47:03.521236 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:03 crc kubenswrapper[4941]: E1130 06:47:03.521402 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.599996 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.600059 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.600077 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.600105 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.600126 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:03Z","lastTransitionTime":"2025-11-30T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.704032 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.704108 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.704126 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.704153 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.704171 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:03Z","lastTransitionTime":"2025-11-30T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.807831 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.807900 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.807918 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.807947 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.807967 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:03Z","lastTransitionTime":"2025-11-30T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.911283 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.911348 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.911360 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.911379 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.911393 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:03Z","lastTransitionTime":"2025-11-30T06:47:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:03 crc kubenswrapper[4941]: I1130 06:47:03.961255 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:03 crc kubenswrapper[4941]: E1130 06:47:03.961535 4941 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:47:03 crc kubenswrapper[4941]: E1130 06:47:03.961669 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs podName:ba34d142-c6e9-45bd-93a4-cf8e15558381 nodeName:}" failed. No retries permitted until 2025-11-30 06:47:11.961636678 +0000 UTC m=+52.729808297 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs") pod "network-metrics-daemon-vwfsk" (UID: "ba34d142-c6e9-45bd-93a4-cf8e15558381") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.015002 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.015045 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.015057 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.015077 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.015090 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:04Z","lastTransitionTime":"2025-11-30T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.118565 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.118635 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.118657 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.118693 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.118721 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:04Z","lastTransitionTime":"2025-11-30T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.222489 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.222542 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.222553 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.222573 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.222584 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:04Z","lastTransitionTime":"2025-11-30T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.326041 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.326118 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.326137 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.326168 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.326191 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:04Z","lastTransitionTime":"2025-11-30T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.429814 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.429936 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.429963 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.429993 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.430014 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:04Z","lastTransitionTime":"2025-11-30T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.520857 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.521007 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:04 crc kubenswrapper[4941]: E1130 06:47:04.521021 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:04 crc kubenswrapper[4941]: E1130 06:47:04.521228 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.537764 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.537842 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.537866 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.537898 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.537927 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:04Z","lastTransitionTime":"2025-11-30T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.641980 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.642059 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.642078 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.642106 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.642127 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:04Z","lastTransitionTime":"2025-11-30T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.745413 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.745470 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.745479 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.745494 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.745507 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:04Z","lastTransitionTime":"2025-11-30T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.848031 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.848565 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.848583 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.848613 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.848632 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:04Z","lastTransitionTime":"2025-11-30T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.952270 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.952390 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.952412 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.952442 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:04 crc kubenswrapper[4941]: I1130 06:47:04.952463 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:04Z","lastTransitionTime":"2025-11-30T06:47:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.056642 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.056751 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.056773 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.056802 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.056822 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:05Z","lastTransitionTime":"2025-11-30T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.160759 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.160821 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.160834 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.160858 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.160875 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:05Z","lastTransitionTime":"2025-11-30T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.264873 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.264929 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.264946 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.264972 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.264989 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:05Z","lastTransitionTime":"2025-11-30T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.368419 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.368508 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.368524 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.368553 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.368569 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:05Z","lastTransitionTime":"2025-11-30T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.471929 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.471980 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.471989 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.472006 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.472019 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:05Z","lastTransitionTime":"2025-11-30T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.521642 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.521663 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:05 crc kubenswrapper[4941]: E1130 06:47:05.521821 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:05 crc kubenswrapper[4941]: E1130 06:47:05.522003 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.574952 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.575036 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.575055 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.575113 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.575136 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:05Z","lastTransitionTime":"2025-11-30T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.678478 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.678525 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.678539 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.678564 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.678579 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:05Z","lastTransitionTime":"2025-11-30T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.781815 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.781883 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.781905 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.781937 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.781958 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:05Z","lastTransitionTime":"2025-11-30T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.885053 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.885097 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.885107 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.885123 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.885134 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:05Z","lastTransitionTime":"2025-11-30T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.987996 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.988105 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.988203 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.988233 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:05 crc kubenswrapper[4941]: I1130 06:47:05.988251 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:05Z","lastTransitionTime":"2025-11-30T06:47:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.092410 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.092489 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.092515 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.092548 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.092576 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:06Z","lastTransitionTime":"2025-11-30T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.195766 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.195842 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.195867 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.195901 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.195925 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:06Z","lastTransitionTime":"2025-11-30T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.299354 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.299422 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.299449 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.299482 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.299504 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:06Z","lastTransitionTime":"2025-11-30T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.404811 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.404881 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.404900 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.404927 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.404946 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:06Z","lastTransitionTime":"2025-11-30T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.508751 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.508807 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.508817 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.508845 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.508865 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:06Z","lastTransitionTime":"2025-11-30T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.521117 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.521214 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:06 crc kubenswrapper[4941]: E1130 06:47:06.521352 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:06 crc kubenswrapper[4941]: E1130 06:47:06.521528 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.612309 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.612401 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.612422 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.612451 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.612468 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:06Z","lastTransitionTime":"2025-11-30T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.715920 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.715994 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.716011 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.716051 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.716075 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:06Z","lastTransitionTime":"2025-11-30T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.819716 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.819788 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.819806 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.819834 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.819852 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:06Z","lastTransitionTime":"2025-11-30T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.922960 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.923034 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.923052 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.923082 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:06 crc kubenswrapper[4941]: I1130 06:47:06.923101 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:06Z","lastTransitionTime":"2025-11-30T06:47:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.026949 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.027015 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.027036 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.027066 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.027085 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:07Z","lastTransitionTime":"2025-11-30T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.130021 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.130092 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.130110 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.130142 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.130160 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:07Z","lastTransitionTime":"2025-11-30T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.233983 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.234052 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.234070 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.234099 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.234118 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:07Z","lastTransitionTime":"2025-11-30T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.337960 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.338034 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.338053 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.338081 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.338101 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:07Z","lastTransitionTime":"2025-11-30T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.442442 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.442528 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.442561 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.442598 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.442623 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:07Z","lastTransitionTime":"2025-11-30T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.521533 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.521666 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:07 crc kubenswrapper[4941]: E1130 06:47:07.521768 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:07 crc kubenswrapper[4941]: E1130 06:47:07.521951 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.547051 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.547124 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.547145 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.547187 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.547209 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:07Z","lastTransitionTime":"2025-11-30T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.650258 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.650315 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.650340 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.650359 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.650374 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:07Z","lastTransitionTime":"2025-11-30T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.753448 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.753525 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.753547 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.753578 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.753596 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:07Z","lastTransitionTime":"2025-11-30T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.856870 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.856912 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.856922 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.856936 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.856946 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:07Z","lastTransitionTime":"2025-11-30T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.960072 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.960189 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.960220 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.960253 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:07 crc kubenswrapper[4941]: I1130 06:47:07.960277 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:07Z","lastTransitionTime":"2025-11-30T06:47:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.064359 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.064428 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.064446 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.064474 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.064494 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:08Z","lastTransitionTime":"2025-11-30T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.167986 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.168045 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.168061 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.168085 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.168100 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:08Z","lastTransitionTime":"2025-11-30T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.271578 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.271651 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.271665 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.271682 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.271697 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:08Z","lastTransitionTime":"2025-11-30T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.375135 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.375197 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.375211 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.375230 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.375241 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:08Z","lastTransitionTime":"2025-11-30T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.478247 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.478294 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.478314 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.478369 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.478385 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:08Z","lastTransitionTime":"2025-11-30T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.521363 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.521438 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:08 crc kubenswrapper[4941]: E1130 06:47:08.521489 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:08 crc kubenswrapper[4941]: E1130 06:47:08.521641 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.522744 4941 scope.go:117] "RemoveContainer" containerID="a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.545783 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc543
9eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.560563 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.582129 4941 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.582164 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.582172 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.582187 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.582197 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:08Z","lastTransitionTime":"2025-11-30T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.589082 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97c
c05d7c5ae25443905067faf5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:53Z\\\",\\\"message\\\":\\\"-go/informers/factory.go:160\\\\nI1130 06:46:52.704303 6385 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704365 6385 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1130 06:46:52.704289 6385 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704310 6385 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704374 6385 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.705217 6385 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:52.705241 6385 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:52.705257 6385 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:52.705363 6385 factory.go:656] Stopping watch factory\\\\nI1130 06:46:52.705381 6385 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:52.705390 6385 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:52.705399 6385 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.604791 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.620246 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.630860 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.648860 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.670914 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.685307 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.685401 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.685423 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.685452 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.685470 4941 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:08Z","lastTransitionTime":"2025-11-30T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.696441 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.711823 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.727248 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.743036 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.758404 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.773534 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.788718 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.788838 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.788853 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.788874 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.788890 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:08Z","lastTransitionTime":"2025-11-30T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.794522 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"starte
dAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 
2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.808699 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.863814 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/1.log" Nov 30 06:47:08 crc 
kubenswrapper[4941]: I1130 06:47:08.866421 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1"} Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.866874 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.892083 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.892127 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.892140 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.892161 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.892173 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:08Z","lastTransitionTime":"2025-11-30T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.896896 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068
641945c19d2aefa2c8096cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:53Z\\\",\\\"message\\\":\\\"-go/informers/factory.go:160\\\\nI1130 06:46:52.704303 6385 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704365 6385 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1130 06:46:52.704289 6385 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704310 6385 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704374 6385 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.705217 6385 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:52.705241 6385 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:52.705257 6385 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:52.705363 6385 factory.go:656] Stopping watch factory\\\\nI1130 06:46:52.705381 6385 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:52.705390 6385 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:52.705399 6385 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.910697 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 
06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.933434 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.953827 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c9
87117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.975623 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.993076 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:08Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.994175 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.994215 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.994228 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.994248 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:08 crc kubenswrapper[4941]: I1130 06:47:08.994261 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:08Z","lastTransitionTime":"2025-11-30T06:47:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.008168 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.020272 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.036807 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.049882 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.063472 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.077494 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.093565 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.096071 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e
58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-
api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.097029 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.097072 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.097080 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.097097 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.097108 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:09Z","lastTransitionTime":"2025-11-30T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.112543 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.113636 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.129727 4941 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.143080 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\
\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.160890 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.176143 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.196280 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97a
a8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.200070 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.200104 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.200118 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.200133 4941 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.200142 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:09Z","lastTransitionTime":"2025-11-30T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.223075 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068
641945c19d2aefa2c8096cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:53Z\\\",\\\"message\\\":\\\"-go/informers/factory.go:160\\\\nI1130 06:46:52.704303 6385 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704365 6385 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1130 06:46:52.704289 6385 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704310 6385 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704374 6385 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.705217 6385 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:52.705241 6385 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:52.705257 6385 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:52.705363 6385 factory.go:656] Stopping watch factory\\\\nI1130 06:46:52.705381 6385 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:52.705390 6385 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:52.705399 6385 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.234155 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 
06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.247026 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.259712 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c9
87117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.273706 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.287994 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.303575 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.303650 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.303698 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.303709 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.303728 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.303740 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:09Z","lastTransitionTime":"2025-11-30T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.319574 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.335798 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.347553 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.358972 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.369755 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.386971 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMoun
ts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sh
a256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.400743 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.406591 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.406646 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.406664 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.406690 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.406709 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:09Z","lastTransitionTime":"2025-11-30T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.509884 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.509985 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.510041 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.510076 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.510097 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:09Z","lastTransitionTime":"2025-11-30T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.521125 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:09 crc kubenswrapper[4941]: E1130 06:47:09.521306 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.521394 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:09 crc kubenswrapper[4941]: E1130 06:47:09.521567 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.548943 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068
641945c19d2aefa2c8096cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:53Z\\\",\\\"message\\\":\\\"-go/informers/factory.go:160\\\\nI1130 06:46:52.704303 6385 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704365 6385 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1130 06:46:52.704289 6385 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704310 6385 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704374 6385 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.705217 6385 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:52.705241 6385 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:52.705257 6385 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:52.705363 6385 factory.go:656] Stopping watch factory\\\\nI1130 06:46:52.705381 6385 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:52.705390 6385 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:52.705399 6385 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.568077 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 
06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.585055 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.602174 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.613049 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.613124 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.613151 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.613188 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.613215 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:09Z","lastTransitionTime":"2025-11-30T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.617845 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.639259 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z"
Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.652824 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z"
Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.668408 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.684624 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.704813 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.718771 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.718847 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.718873 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.718906 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.718931 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:09Z","lastTransitionTime":"2025-11-30T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.730685 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.749076 4941 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.763149 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.784654 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.798592 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.815557 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.824148 4941 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.824210 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.824233 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.824265 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.824289 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:09Z","lastTransitionTime":"2025-11-30T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.833187 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.872200 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/2.log" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.872888 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/1.log" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.877263 4941 generic.go:334] "Generic (PLEG): container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1" exitCode=1 Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.877402 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1"} Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.877528 4941 scope.go:117] "RemoveContainer" containerID="a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.879826 4941 scope.go:117] "RemoveContainer" containerID="3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1" Nov 30 06:47:09 crc kubenswrapper[4941]: E1130 06:47:09.880225 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.894571 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.922543 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 
2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.930446 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.930524 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.930542 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.930573 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.930592 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:09Z","lastTransitionTime":"2025-11-30T06:47:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.937935 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.959796 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.979585 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:09 crc kubenswrapper[4941]: I1130 06:47:09.994609 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.024588 4941 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a5f2c05e0404f493ed84a763cdd651fadc7dd97cc05d7c5ae25443905067faf5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:46:53Z\\\",\\\"message\\\":\\\"-go/informers/factory.go:160\\\\nI1130 06:46:52.704303 6385 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704365 6385 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1130 06:46:52.704289 6385 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704310 6385 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.704374 6385 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1130 06:46:52.705217 6385 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1130 06:46:52.705241 6385 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1130 06:46:52.705257 6385 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1130 06:46:52.705363 6385 factory.go:656] Stopping watch factory\\\\nI1130 06:46:52.705381 6385 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1130 06:46:52.705390 6385 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1130 06:46:52.705399 6385 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 
0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z]\\\\nI1130 06:47:09.405948 6607 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:47:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\"
:\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.034229 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.034309 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.034351 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.034382 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 
30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.034395 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:10Z","lastTransitionTime":"2025-11-30T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.044725 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.060020 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.078242 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.098764 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.117843 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.142710 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.144059 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.144107 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.144127 4941 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.144155 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.144174 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:10Z","lastTransitionTime":"2025-11-30T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.184622 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.218775 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.231646 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.244064 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.246756 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.246786 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.246799 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.246818 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.246831 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:10Z","lastTransitionTime":"2025-11-30T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.348826 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.348868 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.348877 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.348897 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.348907 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:10Z","lastTransitionTime":"2025-11-30T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.440033 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.440227 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:47:42.440191971 +0000 UTC m=+83.208363590 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.440716 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.440839 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.440917 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.440985 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.440856 4941 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.441171 4941 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.441224 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.441295 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.441345 4941 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:47:10 crc 
kubenswrapper[4941]: E1130 06:47:10.440954 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.441444 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.441210 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:47:42.441162191 +0000 UTC m=+83.209333800 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.441488 4941 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.441504 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:47:42.441485981 +0000 UTC m=+83.209657640 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.441525 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-30 06:47:42.441514802 +0000 UTC m=+83.209686521 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.441561 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-30 06:47:42.441541962 +0000 UTC m=+83.209713611 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.451089 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.451171 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.451191 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.451214 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.451232 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:10Z","lastTransitionTime":"2025-11-30T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.521087 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.521088 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.521392 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.521515 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.555064 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.555122 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.555134 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.555155 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.555168 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:10Z","lastTransitionTime":"2025-11-30T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.659004 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.659071 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.659090 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.659118 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.659142 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:10Z","lastTransitionTime":"2025-11-30T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.762206 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.762289 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.762315 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.762386 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.762411 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:10Z","lastTransitionTime":"2025-11-30T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.864975 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.865225 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.865473 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.865719 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.865917 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:10Z","lastTransitionTime":"2025-11-30T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.883667 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/2.log" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.888828 4941 scope.go:117] "RemoveContainer" containerID="3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1" Nov 30 06:47:10 crc kubenswrapper[4941]: E1130 06:47:10.889101 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.911140 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068
641945c19d2aefa2c8096cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z]\\\\nI1130 06:47:09.405948 6607 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:47:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.930104 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.950386 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.970965 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.971026 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.971043 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.971071 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.971091 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:10Z","lastTransitionTime":"2025-11-30T06:47:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.973727 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:10 crc kubenswrapper[4941]: I1130 06:47:10.995581 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:10Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.013443 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:11Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.033737 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:11Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.051394 
4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:11Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.072090 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:11Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.073874 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.073942 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.073964 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.073990 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.074009 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:11Z","lastTransitionTime":"2025-11-30T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.097872 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:11Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.120212 4941 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:11Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.136125 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:11Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.153818 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:11Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.173444 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:11Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.177408 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.177474 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.177497 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.177528 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.177552 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:11Z","lastTransitionTime":"2025-11-30T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.194646 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:11Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.215312 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:11Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.235108 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:11Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.280850 4941 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.280904 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.280923 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.280952 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.280971 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:11Z","lastTransitionTime":"2025-11-30T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.384448 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.384513 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.384531 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.384562 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.384588 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:11Z","lastTransitionTime":"2025-11-30T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.493913 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.493970 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.493982 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.494002 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.494017 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:11Z","lastTransitionTime":"2025-11-30T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.521533 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:11 crc kubenswrapper[4941]: E1130 06:47:11.521668 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.521698 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:11 crc kubenswrapper[4941]: E1130 06:47:11.522050 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.597849 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.597906 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.597926 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.597956 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.597976 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:11Z","lastTransitionTime":"2025-11-30T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.701911 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.701981 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.701998 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.702027 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.702047 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:11Z","lastTransitionTime":"2025-11-30T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.805609 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.805691 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.805710 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.805742 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.805765 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:11Z","lastTransitionTime":"2025-11-30T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.909423 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.909818 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.909835 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.909860 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:11 crc kubenswrapper[4941]: I1130 06:47:11.909881 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:11Z","lastTransitionTime":"2025-11-30T06:47:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.012936 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.013015 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.013035 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.013069 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.013096 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.014926 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.014987 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.015035 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.015072 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.015102 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: E1130 06:47:12.036606 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:12Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.043304 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.043417 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.043442 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.043475 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.043504 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.061182 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:12 crc kubenswrapper[4941]: E1130 06:47:12.061381 4941 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:47:12 crc kubenswrapper[4941]: E1130 06:47:12.061469 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs podName:ba34d142-c6e9-45bd-93a4-cf8e15558381 nodeName:}" failed. No retries permitted until 2025-11-30 06:47:28.061440264 +0000 UTC m=+68.829611893 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs") pod "network-metrics-daemon-vwfsk" (UID: "ba34d142-c6e9-45bd-93a4-cf8e15558381") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:47:12 crc kubenswrapper[4941]: E1130 06:47:12.066969 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:12Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.073027 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.073095 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.073121 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.073154 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.073178 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: E1130 06:47:12.094857 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:12Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.100008 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.100064 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.100077 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.100097 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.100108 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: E1130 06:47:12.120194 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:12Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.125731 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.125776 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.125787 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.125806 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.125817 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: E1130 06:47:12.145629 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:12Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:12 crc kubenswrapper[4941]: E1130 06:47:12.145760 4941 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.149034 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.149082 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.149100 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.149129 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.149148 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.253386 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.253689 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.253797 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.253870 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.253928 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.357843 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.358164 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.358261 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.358376 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.358481 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.460891 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.460950 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.460962 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.460982 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.460994 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.521684 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.521790 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:12 crc kubenswrapper[4941]: E1130 06:47:12.521934 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:12 crc kubenswrapper[4941]: E1130 06:47:12.522188 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.564134 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.564193 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.564210 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.564237 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.564257 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.668174 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.668223 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.668235 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.668257 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.668270 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.775788 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.775897 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.775937 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.776092 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.776114 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.879593 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.879671 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.879691 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.879722 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.879741 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.983543 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.983622 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.983650 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.983684 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:12 crc kubenswrapper[4941]: I1130 06:47:12.983709 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:12Z","lastTransitionTime":"2025-11-30T06:47:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.087001 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.087088 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.087109 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.087140 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.087159 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:13Z","lastTransitionTime":"2025-11-30T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.190570 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.190666 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.190686 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.190716 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.190736 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:13Z","lastTransitionTime":"2025-11-30T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.294073 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.294141 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.294161 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.294204 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.294223 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:13Z","lastTransitionTime":"2025-11-30T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.398459 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.398548 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.398568 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.398596 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.398614 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:13Z","lastTransitionTime":"2025-11-30T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.502106 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.502168 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.502186 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.502213 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.502231 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:13Z","lastTransitionTime":"2025-11-30T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.521691 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.521742 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:13 crc kubenswrapper[4941]: E1130 06:47:13.521866 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:13 crc kubenswrapper[4941]: E1130 06:47:13.522013 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.605568 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.605642 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.605661 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.605690 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.605710 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:13Z","lastTransitionTime":"2025-11-30T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.709447 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.709512 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.709530 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.709557 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.709576 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:13Z","lastTransitionTime":"2025-11-30T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.812579 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.812639 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.812657 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.812685 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.812703 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:13Z","lastTransitionTime":"2025-11-30T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.916053 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.916135 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.916158 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.916191 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:13 crc kubenswrapper[4941]: I1130 06:47:13.916217 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:13Z","lastTransitionTime":"2025-11-30T06:47:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.019993 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.020074 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.020099 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.020130 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.020150 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:14Z","lastTransitionTime":"2025-11-30T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.123909 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.123981 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.124005 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.124038 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.124056 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:14Z","lastTransitionTime":"2025-11-30T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.227966 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.228052 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.228072 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.228101 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.228121 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:14Z","lastTransitionTime":"2025-11-30T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.331246 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.331316 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.331383 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.331411 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.331430 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:14Z","lastTransitionTime":"2025-11-30T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.435287 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.435396 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.435417 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.435447 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.435477 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:14Z","lastTransitionTime":"2025-11-30T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.520783 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.520852 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:14 crc kubenswrapper[4941]: E1130 06:47:14.521118 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:14 crc kubenswrapper[4941]: E1130 06:47:14.521266 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.538732 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.538800 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.538818 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.538845 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.538864 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:14Z","lastTransitionTime":"2025-11-30T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.642480 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.642550 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.642567 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.642594 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.642614 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:14Z","lastTransitionTime":"2025-11-30T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.745764 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.745847 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.745867 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.745898 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.745916 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:14Z","lastTransitionTime":"2025-11-30T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.848606 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.848695 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.848714 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.848748 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.848769 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:14Z","lastTransitionTime":"2025-11-30T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.952536 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.952617 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.952637 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.952664 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:14 crc kubenswrapper[4941]: I1130 06:47:14.952684 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:14Z","lastTransitionTime":"2025-11-30T06:47:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.056752 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.056822 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.056846 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.056893 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.056917 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:15Z","lastTransitionTime":"2025-11-30T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.165532 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.165606 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.165624 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.165654 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.165675 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:15Z","lastTransitionTime":"2025-11-30T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.268904 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.268974 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.269000 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.269032 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.269052 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:15Z","lastTransitionTime":"2025-11-30T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.372967 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.373030 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.373047 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.373074 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.373094 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:15Z","lastTransitionTime":"2025-11-30T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.477058 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.477120 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.477133 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.477155 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.477170 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:15Z","lastTransitionTime":"2025-11-30T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.520989 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.521053 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:15 crc kubenswrapper[4941]: E1130 06:47:15.521149 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:15 crc kubenswrapper[4941]: E1130 06:47:15.521230 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.580373 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.580412 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.580422 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.580438 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.580452 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:15Z","lastTransitionTime":"2025-11-30T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.683816 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.683876 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.683894 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.683921 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.683939 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:15Z","lastTransitionTime":"2025-11-30T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.786511 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.786557 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.786565 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.786580 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.786592 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:15Z","lastTransitionTime":"2025-11-30T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.890047 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.890105 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.890119 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.890140 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.890153 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:15Z","lastTransitionTime":"2025-11-30T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.993268 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.993318 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.993349 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.993370 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:15 crc kubenswrapper[4941]: I1130 06:47:15.993380 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:15Z","lastTransitionTime":"2025-11-30T06:47:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.095990 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.096059 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.096068 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.096083 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.096096 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:16Z","lastTransitionTime":"2025-11-30T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.198589 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.198647 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.198658 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.198678 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.198690 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:16Z","lastTransitionTime":"2025-11-30T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.302094 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.302142 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.302153 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.302172 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.302208 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:16Z","lastTransitionTime":"2025-11-30T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.405429 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.405487 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.405500 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.405517 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.405526 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:16Z","lastTransitionTime":"2025-11-30T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.508682 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.508720 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.508728 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.508742 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.508754 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:16Z","lastTransitionTime":"2025-11-30T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.521566 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.521621 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:16 crc kubenswrapper[4941]: E1130 06:47:16.521914 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:16 crc kubenswrapper[4941]: E1130 06:47:16.522018 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.612217 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.612375 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.612395 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.612420 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.612438 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:16Z","lastTransitionTime":"2025-11-30T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.716768 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.717048 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.717068 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.717095 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.717114 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:16Z","lastTransitionTime":"2025-11-30T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.819584 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.819683 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.819701 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.819729 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.819743 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:16Z","lastTransitionTime":"2025-11-30T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.922243 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.922317 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.922393 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.922436 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:16 crc kubenswrapper[4941]: I1130 06:47:16.922465 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:16Z","lastTransitionTime":"2025-11-30T06:47:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.025921 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.025985 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.026010 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.026043 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.026073 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:17Z","lastTransitionTime":"2025-11-30T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.130056 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.130122 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.130145 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.130173 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.130193 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:17Z","lastTransitionTime":"2025-11-30T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.233862 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.233955 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.233983 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.234021 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.234045 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:17Z","lastTransitionTime":"2025-11-30T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.337887 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.337965 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.337982 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.338010 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.338027 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:17Z","lastTransitionTime":"2025-11-30T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.441770 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.441821 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.441835 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.441857 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.441872 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:17Z","lastTransitionTime":"2025-11-30T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.520935 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.521023 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:17 crc kubenswrapper[4941]: E1130 06:47:17.521144 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:17 crc kubenswrapper[4941]: E1130 06:47:17.521350 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.545050 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.545139 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.545164 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.545197 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.545221 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:17Z","lastTransitionTime":"2025-11-30T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.648366 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.648465 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.648484 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.648515 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.648535 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:17Z","lastTransitionTime":"2025-11-30T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.751814 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.751886 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.751905 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.751935 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.751954 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:17Z","lastTransitionTime":"2025-11-30T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.855070 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.855162 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.855188 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.855227 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.855253 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:17Z","lastTransitionTime":"2025-11-30T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.958660 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.958745 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.958767 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.958795 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:17 crc kubenswrapper[4941]: I1130 06:47:17.958816 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:17Z","lastTransitionTime":"2025-11-30T06:47:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.062261 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.062396 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.062426 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.062465 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.062489 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:18Z","lastTransitionTime":"2025-11-30T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.165981 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.166055 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.166082 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.166118 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.166143 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:18Z","lastTransitionTime":"2025-11-30T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.269188 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.269249 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.269278 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.269308 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.269380 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:18Z","lastTransitionTime":"2025-11-30T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.372989 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.373060 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.373080 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.373113 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.373135 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:18Z","lastTransitionTime":"2025-11-30T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.476441 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.476526 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.476591 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.476620 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.476640 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:18Z","lastTransitionTime":"2025-11-30T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.520948 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.521069 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:18 crc kubenswrapper[4941]: E1130 06:47:18.521166 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:18 crc kubenswrapper[4941]: E1130 06:47:18.521383 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.580447 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.580514 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.580537 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.580598 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.580621 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:18Z","lastTransitionTime":"2025-11-30T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.684077 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.684143 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.684163 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.684198 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.684217 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:18Z","lastTransitionTime":"2025-11-30T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.787607 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.787674 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.787694 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.787726 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.787746 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:18Z","lastTransitionTime":"2025-11-30T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.890478 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.890552 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.890575 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.890602 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.890621 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:18Z","lastTransitionTime":"2025-11-30T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.994366 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.994434 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.994452 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.994484 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:18 crc kubenswrapper[4941]: I1130 06:47:18.994506 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:18Z","lastTransitionTime":"2025-11-30T06:47:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.097363 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.097416 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.097436 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.097456 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.097471 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:19Z","lastTransitionTime":"2025-11-30T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.200970 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.201032 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.201047 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.201073 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.201094 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:19Z","lastTransitionTime":"2025-11-30T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.304396 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.304461 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.304479 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.304506 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.304526 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:19Z","lastTransitionTime":"2025-11-30T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.408562 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.408702 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.408722 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.408748 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.408765 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:19Z","lastTransitionTime":"2025-11-30T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.510931 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.511420 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.511443 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.511477 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.511496 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:19Z","lastTransitionTime":"2025-11-30T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.521213 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.521404 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:19 crc kubenswrapper[4941]: E1130 06:47:19.521658 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:19 crc kubenswrapper[4941]: E1130 06:47:19.521817 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.541701 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.554228 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.572638 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: 
I1130 06:47:19.586449 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.604940 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.613493 4941 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.613524 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.613532 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.613550 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.613563 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:19Z","lastTransitionTime":"2025-11-30T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.619736 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.641409 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z]\\\\nI1130 06:47:09.405948 6607 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:47:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.659084 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.675390 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.689594 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 
2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.707877 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.716152 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.716186 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.716198 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.716218 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.716231 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:19Z","lastTransitionTime":"2025-11-30T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.728224 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.743045 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.759633 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.769091 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.782053 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.803279 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:19Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.818580 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.818658 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.818770 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.818797 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.819171 4941 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:19Z","lastTransitionTime":"2025-11-30T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.922291 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.922597 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.922660 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.922731 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:19 crc kubenswrapper[4941]: I1130 06:47:19.922793 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:19Z","lastTransitionTime":"2025-11-30T06:47:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.027548 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.027598 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.027607 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.027628 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.027664 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:20Z","lastTransitionTime":"2025-11-30T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.131440 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.131856 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.132095 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.132286 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.132492 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:20Z","lastTransitionTime":"2025-11-30T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.235445 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.235579 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.235602 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.235629 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.235780 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:20Z","lastTransitionTime":"2025-11-30T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.339035 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.339117 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.339145 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.339182 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.339216 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:20Z","lastTransitionTime":"2025-11-30T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.442610 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.442663 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.442673 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.442693 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.442704 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:20Z","lastTransitionTime":"2025-11-30T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.520837 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.520846 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:20 crc kubenswrapper[4941]: E1130 06:47:20.521059 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:20 crc kubenswrapper[4941]: E1130 06:47:20.521172 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.546434 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.546489 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.546510 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.546537 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.546557 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:20Z","lastTransitionTime":"2025-11-30T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.650116 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.650180 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.650274 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.650299 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.650316 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:20Z","lastTransitionTime":"2025-11-30T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.753700 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.753764 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.753779 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.753800 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.753814 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:20Z","lastTransitionTime":"2025-11-30T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.857198 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.857275 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.857295 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.857358 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.857380 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:20Z","lastTransitionTime":"2025-11-30T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.961799 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.961865 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.961889 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.961914 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:20 crc kubenswrapper[4941]: I1130 06:47:20.961930 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:20Z","lastTransitionTime":"2025-11-30T06:47:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.065978 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.066044 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.066057 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.066077 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.066099 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:21Z","lastTransitionTime":"2025-11-30T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.169483 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.169572 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.169587 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.169607 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.169623 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:21Z","lastTransitionTime":"2025-11-30T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.273143 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.273225 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.273251 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.273289 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.273314 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:21Z","lastTransitionTime":"2025-11-30T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.377294 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.377390 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.377419 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.377451 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.377475 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:21Z","lastTransitionTime":"2025-11-30T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.480438 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.480497 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.480517 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.480547 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.480569 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:21Z","lastTransitionTime":"2025-11-30T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.520982 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.521219 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:21 crc kubenswrapper[4941]: E1130 06:47:21.521390 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:21 crc kubenswrapper[4941]: E1130 06:47:21.521556 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.583838 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.583888 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.583905 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.583930 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.583948 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:21Z","lastTransitionTime":"2025-11-30T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.687455 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.687519 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.687539 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.687569 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.687589 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:21Z","lastTransitionTime":"2025-11-30T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.791604 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.791655 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.791672 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.791698 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.791716 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:21Z","lastTransitionTime":"2025-11-30T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.895010 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.895072 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.895095 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.895127 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.895150 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:21Z","lastTransitionTime":"2025-11-30T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.998389 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.998450 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.998474 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.998506 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:21 crc kubenswrapper[4941]: I1130 06:47:21.998528 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:21Z","lastTransitionTime":"2025-11-30T06:47:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.101587 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.101652 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.101672 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.101696 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.101713 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.205647 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.205723 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.205750 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.205783 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.205806 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.308728 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.308787 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.308808 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.308831 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.308851 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.411474 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.411508 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.411518 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.411541 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.411556 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.448114 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.448194 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.448220 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.448255 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.448278 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: E1130 06:47:22.472992 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:22Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.477858 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.477931 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.477954 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.477982 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.478003 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: E1130 06:47:22.498732 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:22Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.504160 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.504230 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.504252 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.504282 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.504302 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: E1130 06:47:22.519776 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:22Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.520742 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.520773 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:22 crc kubenswrapper[4941]: E1130 06:47:22.520956 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:22 crc kubenswrapper[4941]: E1130 06:47:22.521280 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.525281 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.525436 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.525525 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.525611 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.525691 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: E1130 06:47:22.552610 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:22Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.557420 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.557707 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.557853 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.558021 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.558173 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: E1130 06:47:22.574993 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:22Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:22 crc kubenswrapper[4941]: E1130 06:47:22.575595 4941 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.578587 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
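
All of the node-status patch retries above fail for the same reason: the webhook's serving certificate expired on 2025-08-24T17:21:41Z, long before the node's clock time of 2025-11-30. Below is a minimal standalone Go sketch of the validity comparison that produces this x509 error; the certificate path is a placeholder and the program is illustrative, not kubelet source.

    // expirycheck.go - standalone sketch; not part of the kubelet.
    package main

    import (
            "crypto/x509"
            "encoding/pem"
            "fmt"
            "log"
            "os"
            "time"
    )

    func main() {
            // Placeholder path; point at the webhook's PEM-encoded serving cert.
            pemBytes, err := os.ReadFile("/etc/webhook/serving-cert.pem")
            if err != nil {
                    log.Fatal(err)
            }
            block, _ := pem.Decode(pemBytes)
            if block == nil {
                    log.Fatal("no PEM block found")
            }
            cert, err := x509.ParseCertificate(block.Bytes)
            if err != nil {
                    log.Fatal(err)
            }
            now := time.Now().UTC()
            // The comparison behind "certificate has expired or is not yet valid".
            if now.After(cert.NotAfter) || now.Before(cert.NotBefore) {
                    fmt.Printf("invalid: current time %s is outside [%s, %s]\n",
                            now.Format(time.RFC3339),
                            cert.NotBefore.Format(time.RFC3339),
                            cert.NotAfter.Format(time.RFC3339))
                    return
            }
            fmt.Println("certificate is within its validity window")
    }
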
event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.578651 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.578673 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.578700 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.578720 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.682463 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.682545 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.682564 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.682593 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.682612 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.786413 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.786481 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.786500 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.786531 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.786550 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.890210 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.890837 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.890917 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.891003 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.891089 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.993846 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.993883 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.993892 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.993910 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:22 crc kubenswrapper[4941]: I1130 06:47:22.993919 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:22Z","lastTransitionTime":"2025-11-30T06:47:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.096852 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.096924 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.096950 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.097018 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.097226 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:23Z","lastTransitionTime":"2025-11-30T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.200081 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.200121 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.200130 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.200146 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.200157 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:23Z","lastTransitionTime":"2025-11-30T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.302867 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.303175 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.303260 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.303344 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.303419 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:23Z","lastTransitionTime":"2025-11-30T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.406407 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.406691 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.406759 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.406841 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.406919 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:23Z","lastTransitionTime":"2025-11-30T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.510164 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.510220 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.510238 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.510263 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.510280 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:23Z","lastTransitionTime":"2025-11-30T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.521054 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.521114 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:47:23 crc kubenswrapper[4941]: E1130 06:47:23.521226 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:47:23 crc kubenswrapper[4941]: E1130 06:47:23.521860 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
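
Every "Node became not ready" and "Error syncing pod" record in this stretch carries the same KubeletNotReady cause: no CNI configuration file in /etc/kubernetes/cni/net.d/. A rough Go sketch of that readiness test follows; it assumes (per libcni's documented behavior, not shown in this log) that configurations are *.conf, *.conflist, or *.json files in that directory, and it is illustrative rather than kubelet source.

    // cnicheck.go - illustrative sketch; not how the kubelet is actually wired.
    package main

    import (
            "fmt"
            "path/filepath"
    )

    func main() {
            confDir := "/etc/kubernetes/cni/net.d" // directory named in the log message
            var found []string
            // libcni loads .conf, .conflist and .json configuration files.
            for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
                    matches, err := filepath.Glob(filepath.Join(confDir, pattern))
                    if err != nil {
                            continue // only possible with a malformed pattern
                    }
                    found = append(found, matches...)
            }
            if len(found) == 0 {
                    // Corresponds to the NetworkReady=false condition in the log.
                    fmt.Println("network not ready: no CNI configuration file in", confDir)
                    return
            }
            fmt.Println("CNI configurations found:", found)
    }
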
pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.522053 4941 scope.go:117] "RemoveContainer" containerID="3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1" Nov 30 06:47:23 crc kubenswrapper[4941]: E1130 06:47:23.522272 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.613895 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.614002 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.614023 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.614055 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.614077 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:23Z","lastTransitionTime":"2025-11-30T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.717371 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.717422 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.717433 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.717451 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.717465 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:23Z","lastTransitionTime":"2025-11-30T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.819975 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.820032 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.820049 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.820255 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.820272 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:23Z","lastTransitionTime":"2025-11-30T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.923159 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.923218 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.923229 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.923251 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:23 crc kubenswrapper[4941]: I1130 06:47:23.923263 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:23Z","lastTransitionTime":"2025-11-30T06:47:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.026308 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.026389 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.026407 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.026435 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.026455 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:24Z","lastTransitionTime":"2025-11-30T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.129318 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.129394 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.129410 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.129431 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.129453 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:24Z","lastTransitionTime":"2025-11-30T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.233200 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.233580 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.233672 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.233771 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.233863 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:24Z","lastTransitionTime":"2025-11-30T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.336036 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.336073 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.336083 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.336096 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.336107 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:24Z","lastTransitionTime":"2025-11-30T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.439407 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.439488 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.439509 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.439544 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.439570 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:24Z","lastTransitionTime":"2025-11-30T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.521144 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.521187 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:24 crc kubenswrapper[4941]: E1130 06:47:24.521347 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:24 crc kubenswrapper[4941]: E1130 06:47:24.521507 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.542224 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.542276 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.542297 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.542348 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.542368 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:24Z","lastTransitionTime":"2025-11-30T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.645650 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.645691 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.645701 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.645726 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.645737 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:24Z","lastTransitionTime":"2025-11-30T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.748075 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.748124 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.748138 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.748155 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.748175 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:24Z","lastTransitionTime":"2025-11-30T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.850163 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.850196 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.850205 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.850220 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.850230 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:24Z","lastTransitionTime":"2025-11-30T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.953589 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.953636 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.953646 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.953668 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:24 crc kubenswrapper[4941]: I1130 06:47:24.953684 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:24Z","lastTransitionTime":"2025-11-30T06:47:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.056973 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.057061 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.057078 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.057108 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.057124 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:25Z","lastTransitionTime":"2025-11-30T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.161240 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.161295 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.161308 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.161349 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.161365 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:25Z","lastTransitionTime":"2025-11-30T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.264498 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.264563 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.264586 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.264620 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.264639 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:25Z","lastTransitionTime":"2025-11-30T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.367941 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.368004 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.368021 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.368041 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.368055 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:25Z","lastTransitionTime":"2025-11-30T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.471894 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.471946 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.471956 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.471973 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.471982 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:25Z","lastTransitionTime":"2025-11-30T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.522227 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.522307 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:25 crc kubenswrapper[4941]: E1130 06:47:25.523532 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:25 crc kubenswrapper[4941]: E1130 06:47:25.523684 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.575214 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.575352 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.575380 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.575420 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.575445 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:25Z","lastTransitionTime":"2025-11-30T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.678087 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.678131 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.678144 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.678161 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.678173 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:25Z","lastTransitionTime":"2025-11-30T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.781208 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.781254 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.781266 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.781283 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.781296 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:25Z","lastTransitionTime":"2025-11-30T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.883877 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.883920 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.883931 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.883946 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.883962 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:25Z","lastTransitionTime":"2025-11-30T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.987262 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.987310 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.987338 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.987356 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:25 crc kubenswrapper[4941]: I1130 06:47:25.987368 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:25Z","lastTransitionTime":"2025-11-30T06:47:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.089588 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.089667 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.089702 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.089733 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.089760 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:26Z","lastTransitionTime":"2025-11-30T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.193121 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.193375 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.193389 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.193413 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.193434 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:26Z","lastTransitionTime":"2025-11-30T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.295919 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.295946 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.295955 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.295977 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.295987 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:26Z","lastTransitionTime":"2025-11-30T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.397902 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.397926 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.397934 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.397947 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.397955 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:26Z","lastTransitionTime":"2025-11-30T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.500879 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.500903 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.500912 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.500924 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.500936 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:26Z","lastTransitionTime":"2025-11-30T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.521463 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.521573 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:26 crc kubenswrapper[4941]: E1130 06:47:26.521663 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:26 crc kubenswrapper[4941]: E1130 06:47:26.521882 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.603989 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.604054 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.604072 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.604099 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.604119 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:26Z","lastTransitionTime":"2025-11-30T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.707242 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.707281 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.707292 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.707310 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.707344 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:26Z","lastTransitionTime":"2025-11-30T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.809893 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.809931 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.809943 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.809961 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.810017 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:26Z","lastTransitionTime":"2025-11-30T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.913509 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.913562 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.913583 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.913608 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:26 crc kubenswrapper[4941]: I1130 06:47:26.913626 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:26Z","lastTransitionTime":"2025-11-30T06:47:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.016544 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.016597 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.016609 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.016631 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.016644 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:27Z","lastTransitionTime":"2025-11-30T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.119059 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.119106 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.119118 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.119134 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.119145 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:27Z","lastTransitionTime":"2025-11-30T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.221432 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.221484 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.221500 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.221524 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.221541 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:27Z","lastTransitionTime":"2025-11-30T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.324587 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.324670 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.324693 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.324728 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.324757 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:27Z","lastTransitionTime":"2025-11-30T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.428186 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.428245 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.428258 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.428278 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.428292 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:27Z","lastTransitionTime":"2025-11-30T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.520787 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.520875 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:27 crc kubenswrapper[4941]: E1130 06:47:27.520951 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:27 crc kubenswrapper[4941]: E1130 06:47:27.521100 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.530701 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.530774 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.530793 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.530823 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.530842 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:27Z","lastTransitionTime":"2025-11-30T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.632613 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.632642 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.632650 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.632663 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.632673 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:27Z","lastTransitionTime":"2025-11-30T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.735667 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.735731 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.735749 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.735775 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.735792 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:27Z","lastTransitionTime":"2025-11-30T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.838922 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.839009 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.839031 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.839063 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.839085 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:27Z","lastTransitionTime":"2025-11-30T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.942917 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.942988 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.943013 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.943042 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:27 crc kubenswrapper[4941]: I1130 06:47:27.943060 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:27Z","lastTransitionTime":"2025-11-30T06:47:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.045988 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.046033 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.046042 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.046061 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.046071 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:28Z","lastTransitionTime":"2025-11-30T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.149306 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.149368 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.149380 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.149398 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.149411 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:28Z","lastTransitionTime":"2025-11-30T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.159352 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:28 crc kubenswrapper[4941]: E1130 06:47:28.159570 4941 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:47:28 crc kubenswrapper[4941]: E1130 06:47:28.159748 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs podName:ba34d142-c6e9-45bd-93a4-cf8e15558381 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:00.159693206 +0000 UTC m=+100.927864825 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs") pod "network-metrics-daemon-vwfsk" (UID: "ba34d142-c6e9-45bd-93a4-cf8e15558381") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.252584 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.252661 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.252685 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.252717 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.252739 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:28Z","lastTransitionTime":"2025-11-30T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.355686 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.355724 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.355732 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.355747 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.355758 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:28Z","lastTransitionTime":"2025-11-30T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.458603 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.458653 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.458664 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.458682 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.458694 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:28Z","lastTransitionTime":"2025-11-30T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.520598 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.520690 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:28 crc kubenswrapper[4941]: E1130 06:47:28.520731 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:28 crc kubenswrapper[4941]: E1130 06:47:28.520865 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.560677 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.560704 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.560713 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.560725 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.560735 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:28Z","lastTransitionTime":"2025-11-30T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.663496 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.663556 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.663574 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.663599 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.663617 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:28Z","lastTransitionTime":"2025-11-30T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.766630 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.766678 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.766692 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.766714 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.766726 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:28Z","lastTransitionTime":"2025-11-30T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.869421 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.869463 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.869471 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.869494 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.869515 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:28Z","lastTransitionTime":"2025-11-30T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.953371 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vzc7c_a2c22971-565b-44b0-9312-737c3931a558/kube-multus/0.log" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.953432 4941 generic.go:334] "Generic (PLEG): container finished" podID="a2c22971-565b-44b0-9312-737c3931a558" containerID="475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4" exitCode=1 Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.953474 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vzc7c" event={"ID":"a2c22971-565b-44b0-9312-737c3931a558","Type":"ContainerDied","Data":"475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4"} Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.954068 4941 scope.go:117] "RemoveContainer" containerID="475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.972651 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.972727 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.972745 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.972768 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.972786 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:28Z","lastTransitionTime":"2025-11-30T06:47:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.978401 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:28Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:28 crc kubenswrapper[4941]: I1130 06:47:28.998616 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:28Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.020836 4941 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.048193 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068
641945c19d2aefa2c8096cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z]\\\\nI1130 06:47:09.405948 6607 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:47:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.067421 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.075686 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.075709 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.075723 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.075747 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.075762 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:29Z","lastTransitionTime":"2025-11-30T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.085173 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:28Z\\\",\\\"message\\\":\\\"2025-11-30T06:46:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f\\\\n2025-11-30T06:46:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f to /host/opt/cni/bin/\\\\n2025-11-30T06:46:43Z [verbose] multus-daemon started\\\\n2025-11-30T06:46:43Z [verbose] Readiness Indicator file check\\\\n2025-11-30T06:47:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.104403 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.121276 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.142305 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.161246 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.178160 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.178202 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.178215 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.178236 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.178251 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:29Z","lastTransitionTime":"2025-11-30T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.181276 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.201043 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.215485 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.231269 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.243191 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.258964 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMoun
ts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sh
a256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.271435 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.280790 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.280819 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.280830 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.280847 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.280859 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:29Z","lastTransitionTime":"2025-11-30T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.383990 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.384050 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.384069 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.384100 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.384121 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:29Z","lastTransitionTime":"2025-11-30T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.487686 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.487733 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.487746 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.487763 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.487776 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:29Z","lastTransitionTime":"2025-11-30T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.521197 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.521268 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:29 crc kubenswrapper[4941]: E1130 06:47:29.521396 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:29 crc kubenswrapper[4941]: E1130 06:47:29.521515 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.548638 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.560416 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.579372 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.593540 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.593843 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.593864 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.593874 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.593891 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.593902 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:29Z","lastTransitionTime":"2025-11-30T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.609753 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.624476 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.639559 4941 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.672249 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068
641945c19d2aefa2c8096cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z]\\\\nI1130 06:47:09.405948 6607 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:47:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.686536 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.697156 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.697189 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.697202 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.697223 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.697235 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:29Z","lastTransitionTime":"2025-11-30T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.699215 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.711897 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.725225 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:28Z\\\",\\\"message\\\":\\\"2025-11-30T06:46:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f\\\\n2025-11-30T06:46:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f to /host/opt/cni/bin/\\\\n2025-11-30T06:46:43Z [verbose] multus-daemon started\\\\n2025-11-30T06:46:43Z [verbose] Readiness Indicator file check\\\\n2025-11-30T06:47:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.740248 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.755521 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.769853 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.785471 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.799678 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.799735 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.799748 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.799769 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.799783 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:29Z","lastTransitionTime":"2025-11-30T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.800909 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.902481 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.902532 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.902544 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.902565 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.902575 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:29Z","lastTransitionTime":"2025-11-30T06:47:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.958744 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vzc7c_a2c22971-565b-44b0-9312-737c3931a558/kube-multus/0.log" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.958802 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vzc7c" event={"ID":"a2c22971-565b-44b0-9312-737c3931a558","Type":"ContainerStarted","Data":"72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4"} Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.976167 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:29 crc kubenswrapper[4941]: I1130 06:47:29.988970 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:29Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.004957 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.005080 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.005141 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.005204 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.005279 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:30Z","lastTransitionTime":"2025-11-30T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.008946 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/hos
t/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3
145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.020611 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.036294 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.050862 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.062368 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97a
a8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.088540 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z]\\\\nI1130 06:47:09.405948 6607 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:47:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.104456 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.113672 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.113861 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.113920 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.114017 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.114085 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:30Z","lastTransitionTime":"2025-11-30T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.122732 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.135883 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.149657 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:28Z\\\",\\\"message\\\":\\\"2025-11-30T06:46:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f\\\\n2025-11-30T06:46:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f to /host/opt/cni/bin/\\\\n2025-11-30T06:46:43Z [verbose] multus-daemon started\\\\n2025-11-30T06:46:43Z [verbose] Readiness Indicator file check\\\\n2025-11-30T06:47:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:47:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.163027 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.176462 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.186938 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.200447 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.212261 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:30Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.216229 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.216284 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.216303 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.216361 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.216381 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:30Z","lastTransitionTime":"2025-11-30T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.319302 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.319587 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.319716 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.319837 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.319929 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:30Z","lastTransitionTime":"2025-11-30T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.423477 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.423740 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.423809 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.423882 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.423971 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:30Z","lastTransitionTime":"2025-11-30T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.521303 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.521425 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:30 crc kubenswrapper[4941]: E1130 06:47:30.521506 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:30 crc kubenswrapper[4941]: E1130 06:47:30.521636 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.526578 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.526659 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.526720 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.526781 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.526845 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:30Z","lastTransitionTime":"2025-11-30T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.629466 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.629735 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.629823 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.629914 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.630003 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:30Z","lastTransitionTime":"2025-11-30T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.732824 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.732866 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.732881 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.732900 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.732910 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:30Z","lastTransitionTime":"2025-11-30T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.835507 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.835537 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.835547 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.835562 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.835571 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:30Z","lastTransitionTime":"2025-11-30T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.938663 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.938710 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.938722 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.938744 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:30 crc kubenswrapper[4941]: I1130 06:47:30.938756 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:30Z","lastTransitionTime":"2025-11-30T06:47:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.041169 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.041210 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.041223 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.041241 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.041252 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:31Z","lastTransitionTime":"2025-11-30T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.152733 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.153258 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.153341 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.153429 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.153512 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:31Z","lastTransitionTime":"2025-11-30T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.257068 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.257133 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.257154 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.257182 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.257202 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:31Z","lastTransitionTime":"2025-11-30T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.359918 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.359966 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.359982 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.360005 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.360022 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:31Z","lastTransitionTime":"2025-11-30T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.462766 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.462832 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.462849 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.462876 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.462895 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:31Z","lastTransitionTime":"2025-11-30T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.521560 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.521610 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:31 crc kubenswrapper[4941]: E1130 06:47:31.521763 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:31 crc kubenswrapper[4941]: E1130 06:47:31.522127 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.565555 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.565601 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.565618 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.565642 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.565660 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:31Z","lastTransitionTime":"2025-11-30T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.670281 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.670347 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.670363 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.670384 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.670397 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:31Z","lastTransitionTime":"2025-11-30T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.772662 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.772693 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.772703 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.772741 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.772755 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:31Z","lastTransitionTime":"2025-11-30T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.875265 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.875314 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.875393 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.875420 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.875442 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:31Z","lastTransitionTime":"2025-11-30T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.978464 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.978908 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.979070 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.979244 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:31 crc kubenswrapper[4941]: I1130 06:47:31.979428 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:31Z","lastTransitionTime":"2025-11-30T06:47:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.082922 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.083021 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.083041 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.083070 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.083089 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.186101 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.186177 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.186197 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.186224 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.186239 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.289489 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.289947 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.290124 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.290358 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.290578 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.394808 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.394870 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.394885 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.394909 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.394925 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.497682 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.497742 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.497764 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.497799 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.497819 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.521769 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.521782 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:32 crc kubenswrapper[4941]: E1130 06:47:32.522111 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:32 crc kubenswrapper[4941]: E1130 06:47:32.522259 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.602294 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.602418 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.602443 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.602472 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.602492 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.706123 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.706207 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.706228 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.706261 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.706401 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.811086 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.811147 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.811166 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.811192 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.811211 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.816940 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.816995 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.817011 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.817038 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.817057 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 30 06:47:32 crc kubenswrapper[4941]: E1130 06:47:32.839443 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:32Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.845314 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.845468 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.845497 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.845536 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.845563 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: E1130 06:47:32.864485 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:32Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.871231 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.871278 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.871291 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.871314 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.871344 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.896574 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.896680 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.896704 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.896737 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.896763 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.924067 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.924116 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.924137 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.924169 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.924190 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:32 crc kubenswrapper[4941]: E1130 06:47:32.945181 4941 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.947862 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.947904 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.947922 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.947945 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:32 crc kubenswrapper[4941]: I1130 06:47:32.947961 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:32Z","lastTransitionTime":"2025-11-30T06:47:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.051094 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.051166 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.051187 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.051219 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.051240 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:33Z","lastTransitionTime":"2025-11-30T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.154775 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.154850 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.154870 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.154903 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.154927 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:33Z","lastTransitionTime":"2025-11-30T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.259147 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.259205 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.259222 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.259251 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.259270 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:33Z","lastTransitionTime":"2025-11-30T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.362956 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.363026 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.363044 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.363086 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.363105 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:33Z","lastTransitionTime":"2025-11-30T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.467767 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.467834 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.467854 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.467882 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.467901 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:33Z","lastTransitionTime":"2025-11-30T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.520852 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.520873 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:33 crc kubenswrapper[4941]: E1130 06:47:33.521110 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:33 crc kubenswrapper[4941]: E1130 06:47:33.521254 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.570991 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.571061 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.571081 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.571118 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.571141 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:33Z","lastTransitionTime":"2025-11-30T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.674227 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.674299 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.674318 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.674382 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.674402 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:33Z","lastTransitionTime":"2025-11-30T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.777862 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.777917 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.777935 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.777960 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.777980 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:33Z","lastTransitionTime":"2025-11-30T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.881471 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.881531 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.881544 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.881566 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.881580 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:33Z","lastTransitionTime":"2025-11-30T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.983855 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.983927 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.983943 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.983968 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:33 crc kubenswrapper[4941]: I1130 06:47:33.983992 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:33Z","lastTransitionTime":"2025-11-30T06:47:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.087576 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.087724 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.087755 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.087796 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.087826 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:34Z","lastTransitionTime":"2025-11-30T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.191778 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.191871 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.191898 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.191933 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.191956 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:34Z","lastTransitionTime":"2025-11-30T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.295973 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.296069 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.296090 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.296120 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.296144 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:34Z","lastTransitionTime":"2025-11-30T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.399569 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.399645 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.399664 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.399691 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.399712 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:34Z","lastTransitionTime":"2025-11-30T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.503621 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.503712 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.503736 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.503772 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.503801 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:34Z","lastTransitionTime":"2025-11-30T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.521495 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.521530 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:34 crc kubenswrapper[4941]: E1130 06:47:34.521742 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:34 crc kubenswrapper[4941]: E1130 06:47:34.521875 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.607208 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.607287 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.607313 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.607394 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.607424 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:34Z","lastTransitionTime":"2025-11-30T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.710912 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.710969 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.710990 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.711017 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.711036 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:34Z","lastTransitionTime":"2025-11-30T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.814641 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.814703 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.814724 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.814751 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.814775 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:34Z","lastTransitionTime":"2025-11-30T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.918682 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.918737 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.918746 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.918762 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:34 crc kubenswrapper[4941]: I1130 06:47:34.918772 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:34Z","lastTransitionTime":"2025-11-30T06:47:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.022059 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.022153 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.022183 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.022219 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.022244 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:35Z","lastTransitionTime":"2025-11-30T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.126099 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.126163 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.126183 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.126209 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.126229 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:35Z","lastTransitionTime":"2025-11-30T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.230186 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.230239 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.230250 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.230273 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.230287 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:35Z","lastTransitionTime":"2025-11-30T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.334156 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.334259 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.334286 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.334321 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.334384 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:35Z","lastTransitionTime":"2025-11-30T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.438496 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.438565 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.438583 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.438614 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.438633 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:35Z","lastTransitionTime":"2025-11-30T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.521729 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.521847 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:35 crc kubenswrapper[4941]: E1130 06:47:35.521963 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:35 crc kubenswrapper[4941]: E1130 06:47:35.522060 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.542291 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.542360 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.542377 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.542401 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.542414 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:35Z","lastTransitionTime":"2025-11-30T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.646125 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.646229 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.646254 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.646291 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.646314 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:35Z","lastTransitionTime":"2025-11-30T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.749297 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.749406 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.749428 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.749461 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.749485 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:35Z","lastTransitionTime":"2025-11-30T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.852677 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.852745 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.852763 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.852800 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.852820 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:35Z","lastTransitionTime":"2025-11-30T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.956467 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.956537 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.956554 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.956589 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:35 crc kubenswrapper[4941]: I1130 06:47:35.956608 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:35Z","lastTransitionTime":"2025-11-30T06:47:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.059385 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.059865 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.059885 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.059913 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.059934 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:36Z","lastTransitionTime":"2025-11-30T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.162706 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.162777 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.162797 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.162824 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.162842 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:36Z","lastTransitionTime":"2025-11-30T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.266485 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.266547 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.266559 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.266580 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.266598 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:36Z","lastTransitionTime":"2025-11-30T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.369834 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.369884 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.369900 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.369926 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.369943 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:36Z","lastTransitionTime":"2025-11-30T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.473295 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.473389 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.473408 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.473433 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.473455 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:36Z","lastTransitionTime":"2025-11-30T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.521786 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.521809 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:36 crc kubenswrapper[4941]: E1130 06:47:36.522674 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:36 crc kubenswrapper[4941]: E1130 06:47:36.522904 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.523296 4941 scope.go:117] "RemoveContainer" containerID="3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.544412 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.576994 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.577064 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.577078 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.577097 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.577109 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:36Z","lastTransitionTime":"2025-11-30T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.680570 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.680630 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.680649 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.680680 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.680701 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:36Z","lastTransitionTime":"2025-11-30T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.783731 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.783800 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.783828 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.783859 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.783879 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:36Z","lastTransitionTime":"2025-11-30T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.887242 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.887310 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.887471 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.887513 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.887539 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:36Z","lastTransitionTime":"2025-11-30T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.987839 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/2.log" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.989602 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.989673 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.989699 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.989733 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.989760 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:36Z","lastTransitionTime":"2025-11-30T06:47:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.990684 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"} Nov 30 06:47:36 crc kubenswrapper[4941]: I1130 06:47:36.991265 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.011371 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.025035 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a676d94-f6d4-42b2-9ea1-62ecda52e8b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096a8eff1b601167d5c16fea534c438fe1fc135e61418626766c8d434b9afe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c24e0218a3313854727773a84ff6021b749f39e764004422168b99fc67eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22c24e0218a3313854727773a84ff6021b749f39e764004422168b99fc67eb0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.044304 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f715a01b87235ae23a9658b188be608f40ab3f27
c302dfe23ecceacafd3bb36b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z]\\\\nI1130 06:47:09.405948 6607 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:47:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:47:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\
\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.058736 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 
06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.081044 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.092917 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.092976 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.092996 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.093020 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.093039 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:37Z","lastTransitionTime":"2025-11-30T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.109734 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.130751 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.185566 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:28Z\\\",\\\"message\\\":\\\"2025-11-30T06:46:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f\\\\n2025-11-30T06:46:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f to /host/opt/cni/bin/\\\\n2025-11-30T06:46:43Z [verbose] multus-daemon started\\\\n2025-11-30T06:46:43Z [verbose] Readiness Indicator file check\\\\n2025-11-30T06:47:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:47:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.196159 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.196204 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.196215 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.196232 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.196243 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:37Z","lastTransitionTime":"2025-11-30T06:47:37Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.206173 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-
30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.224867 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.242128 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.256447 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.267454 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.281369 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.290556 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.299075 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.299104 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.299114 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.299134 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.299147 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:37Z","lastTransitionTime":"2025-11-30T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.304521 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/hos
t/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3
145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.317402 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\
"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.334817 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.401654 4941 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.401695 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.401704 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.401721 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.401731 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:37Z","lastTransitionTime":"2025-11-30T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.504364 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.504406 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.504415 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.504429 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.504439 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:37Z","lastTransitionTime":"2025-11-30T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.521254 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.521363 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:37 crc kubenswrapper[4941]: E1130 06:47:37.521423 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:37 crc kubenswrapper[4941]: E1130 06:47:37.521525 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.607664 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.607710 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.607722 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.607742 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.607753 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:37Z","lastTransitionTime":"2025-11-30T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.711364 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.711412 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.711422 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.711439 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.711451 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:37Z","lastTransitionTime":"2025-11-30T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.814554 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.814619 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.814638 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.814666 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.814685 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:37Z","lastTransitionTime":"2025-11-30T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.918634 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.918719 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.918739 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.918767 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.918793 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:37Z","lastTransitionTime":"2025-11-30T06:47:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.997731 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/3.log" Nov 30 06:47:37 crc kubenswrapper[4941]: I1130 06:47:37.999096 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/2.log" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.003498 4941 generic.go:334] "Generic (PLEG): container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b" exitCode=1 Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.003575 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"} Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.003640 4941 scope.go:117] "RemoveContainer" containerID="3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.004240 4941 scope.go:117] "RemoveContainer" containerID="f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b" Nov 30 06:47:38 crc kubenswrapper[4941]: E1130 06:47:38.004443 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.022133 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.022174 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.022185 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.022203 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.022214 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:38Z","lastTransitionTime":"2025-11-30T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.028948 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.044959 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a676d94-f6d4-42b2-9ea1-62ecda52e8b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096a8eff1b601167d5c16fea534c438fe1fc135e61418626766c8d434b9afe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c24e0218a3313854727773a
84ff6021b749f39e764004422168b99fc67eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22c24e0218a3313854727773a84ff6021b749f39e764004422168b99fc67eb0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.068738 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f715a01b87235ae23a9658b188be608f40ab3f27
c302dfe23ecceacafd3bb36b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d9ab40876b22a17d95dced11e06ee67b314b068641945c19d2aefa2c8096cf1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:09Z is after 2025-08-24T17:21:41Z]\\\\nI1130 06:47:09.405948 6607 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:47:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:37Z\\\",\\\"message\\\":\\\"d Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z]\\\\nI1130 06:47:37.618691 6963 lb_config.go:1031] Cluster endpoints for openshift-cluster-version/cluster-version-operator for network=default are: map[]\\\\nI1130 06:47:37.618704 6963 services_controller.go:451] Built service openshift-apiserver/api cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", 
Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.37\\\\\\\", Port:443, Template:(*services.Te\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:47:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.086299 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 
06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.105650 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.117635 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.124495 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.124556 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.124574 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.124603 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.124626 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:38Z","lastTransitionTime":"2025-11-30T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.134913 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:28Z\\\",\\\"message\\\":\\\"2025-11-30T06:46:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f\\\\n2025-11-30T06:46:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f to /host/opt/cni/bin/\\\\n2025-11-30T06:46:43Z [verbose] multus-daemon started\\\\n2025-11-30T06:46:43Z [verbose] Readiness Indicator file check\\\\n2025-11-30T06:47:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:47:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.155548 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.174006 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.187510 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.206402 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.217638 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.229163 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.229227 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.229242 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.229268 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.229285 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:38Z","lastTransitionTime":"2025-11-30T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.230948 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.247451 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.270176 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: 
I1130 06:47:38.283401 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.296665 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.314596 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:38Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.332225 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.332319 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.332389 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.332415 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.332433 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:38Z","lastTransitionTime":"2025-11-30T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.435404 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.435452 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.435464 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.435481 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.435495 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:38Z","lastTransitionTime":"2025-11-30T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.520864 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:38 crc kubenswrapper[4941]: E1130 06:47:38.521044 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.521279 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:38 crc kubenswrapper[4941]: E1130 06:47:38.521375 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.538348 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.538418 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.538442 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.538472 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.538492 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:38Z","lastTransitionTime":"2025-11-30T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.546533 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.641793 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.641840 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.641854 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.641873 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.641892 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:38Z","lastTransitionTime":"2025-11-30T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.745295 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.745397 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.745417 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.745444 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.745463 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:38Z","lastTransitionTime":"2025-11-30T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.848752 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.848824 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.848843 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.848872 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.848895 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:38Z","lastTransitionTime":"2025-11-30T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.951846 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.951981 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.952011 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.952048 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:38 crc kubenswrapper[4941]: I1130 06:47:38.952069 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:38Z","lastTransitionTime":"2025-11-30T06:47:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.010275 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/3.log" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.016579 4941 scope.go:117] "RemoveContainer" containerID="f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b" Nov 30 06:47:39 crc kubenswrapper[4941]: E1130 06:47:39.017400 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.036867 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:28Z\\\",\\\"message\\\":\\\"2025-11-30T06:46:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f\\\\n2025-11-30T06:46:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f to /host/opt/cni/bin/\\\\n2025-11-30T06:46:43Z [verbose] multus-daemon started\\\\n2025-11-30T06:46:43Z [verbose] Readiness Indicator file check\\\\n2025-11-30T06:47:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:47:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.055408 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.055552 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.055629 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.055694 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.055797 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:39Z","lastTransitionTime":"2025-11-30T06:47:39Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.057051 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-
30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.079351 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.097008 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.118290 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.138259 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.155805 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.159413 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.159505 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.159529 4941 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.159562 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.159585 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:39Z","lastTransitionTime":"2025-11-30T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.173151 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.205661 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84de4c25-bc18-4219-8bb0-063e3c81bc4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://afbf53b573dcc7467bbdf678490cc227abc5dd89765e40b8896b7b99bc2b4091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b79c8384dd4ff0369ffb7fefab6c2d8fc6711fdf3e75b5c6204c1d9d8aee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c98b8812b61b751baa2e0b37c485c3e8e1ccd9314228a4b456d82d9726d2ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43293ea156b7c394ed916e9bb2407bf3b84ebc8
87636b56561283d751b682230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://389f098363a2ac6cd13000848fbec8fcc9f8124ec060b6f6875c570aebaaeab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://813da332ad7586030a90f8fce3a25e3f0f73b9cff17160f42ce3c7093ce71c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813da332ad7586030a90f8fce3a25e3f0f73b9cff17160f42ce3c7093ce71c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9fe38dceb78e63deb66ed43997788485dadfbbf2c7bd0a0348ef09fae9df27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d9fe38dceb78e63deb66ed43997788485dadfbbf2c7bd0a0348ef09fae9df27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b30d92b344231d159b55db9d12277a21a05c707da992a233fdef76c57dd389db\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b30d92b344231d159b55db9d12277a21a05c707da992a233fdef76c57dd389db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.223555 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.238626 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.263638 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: 
I1130 06:47:39.264364 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.264473 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.264561 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.264652 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.264741 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:39Z","lastTransitionTime":"2025-11-30T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.289022 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.313156 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.333052 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.349561 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97a
a8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.366719 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a676d94-f6d4-42b2-9ea1-62ecda52e8b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096a8eff1b601167d5c16fea534c438fe1fc135e61418626766c8d434b9afe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c24e0218a3313854727773a84ff6021b749f39e764004422168b99fc67eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22c24e0218a3313854727773a84ff6021b749f39e764004422168b99fc67eb0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.368355 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.368396 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.368412 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.368436 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.368449 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:39Z","lastTransitionTime":"2025-11-30T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.401556 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f715a01b87235ae23a9658b188be608f40ab3f27
c302dfe23ecceacafd3bb36b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:37Z\\\",\\\"message\\\":\\\"d Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z]\\\\nI1130 06:47:37.618691 6963 lb_config.go:1031] Cluster endpoints for openshift-cluster-version/cluster-version-operator for network=default are: map[]\\\\nI1130 06:47:37.618704 6963 services_controller.go:451] Built service openshift-apiserver/api cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.37\\\\\\\", Port:443, Template:(*services.Te\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:47:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.420716 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.472114 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.472184 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.472203 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.472234 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.472254 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:39Z","lastTransitionTime":"2025-11-30T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.521091 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:39 crc kubenswrapper[4941]: E1130 06:47:39.521293 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.521347 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:39 crc kubenswrapper[4941]: E1130 06:47:39.521520 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.542749 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.562143 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.576960 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.577039 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.577056 4941 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.577084 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.577103 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:39Z","lastTransitionTime":"2025-11-30T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.584426 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.610729 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:28Z\\\",\\\"message\\\":\\\"2025-11-30T06:46:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f\\\\n2025-11-30T06:46:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f to /host/opt/cni/bin/\\\\n2025-11-30T06:46:43Z [verbose] multus-daemon started\\\\n2025-11-30T06:46:43Z [verbose] Readiness Indicator file check\\\\n2025-11-30T06:47:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:47:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.642494 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.665402 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.680454 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.680517 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.680539 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.680570 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.680591 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:39Z","lastTransitionTime":"2025-11-30T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.690223 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.713727 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.732011 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.767422 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"84de4c25-bc18-4219-8bb0-063e3c81bc4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://afbf53b573dcc7467bbdf678490cc227abc5dd89765e40b8896b7b99bc2b4091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b79c8384dd4ff0369ffb7fefab6c2d8fc6711fdf3e75b5c6204c1d9d8aee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c98b8812b61b751baa2e0b37c485c3e8e1ccd9314228a4b456d82d9726d2ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43293ea156b7c394ed916e9bb2407bf3b84ebc8
87636b56561283d751b682230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://389f098363a2ac6cd13000848fbec8fcc9f8124ec060b6f6875c570aebaaeab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://813da332ad7586030a90f8fce3a25e3f0f73b9cff17160f42ce3c7093ce71c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813da332ad7586030a90f8fce3a25e3f0f73b9cff17160f42ce3c7093ce71c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9fe38dceb78e63deb66ed43997788485dadfbbf2c7bd0a0348ef09fae9df27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d9fe38dceb78e63deb66ed43997788485dadfbbf2c7bd0a0348ef09fae9df27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b30d92b344231d159b55db9d12277a21a05c707da992a233fdef76c57dd389db\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b30d92b344231d159b55db9d12277a21a05c707da992a233fdef76c57dd389db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.784292 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.784397 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.784417 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.784446 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.784466 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:39Z","lastTransitionTime":"2025-11-30T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.788546 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.805887 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.833662 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.858889 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.881668 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.887789 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.887984 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.888119 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.888296 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.888519 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:39Z","lastTransitionTime":"2025-11-30T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.906486 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.929202 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a676d94-f6d4-42b2-9ea1-62ecda52e8b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096a8eff1b601167d5c16fea534c438fe1fc135e61418626766c8d434b9afe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c24e0218a3313854727773a
84ff6021b749f39e764004422168b99fc67eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22c24e0218a3313854727773a84ff6021b749f39e764004422168b99fc67eb0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.963807 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f715a01b87235ae23a9658b188be608f40ab3f27
c302dfe23ecceacafd3bb36b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:37Z\\\",\\\"message\\\":\\\"d Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z]\\\\nI1130 06:47:37.618691 6963 lb_config.go:1031] Cluster endpoints for openshift-cluster-version/cluster-version-operator for network=default are: map[]\\\\nI1130 06:47:37.618704 6963 services_controller.go:451] Built service openshift-apiserver/api cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.37\\\\\\\", Port:443, Template:(*services.Te\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:47:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.978356 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:39Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.992177 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.992223 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.992239 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.992260 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:39 crc kubenswrapper[4941]: I1130 06:47:39.992277 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:39Z","lastTransitionTime":"2025-11-30T06:47:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.095046 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.095125 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.095150 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.095182 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.095203 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:40Z","lastTransitionTime":"2025-11-30T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.198541 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.198618 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.198641 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.198687 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.198708 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:40Z","lastTransitionTime":"2025-11-30T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.301712 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.301772 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.301793 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.301818 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.301837 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:40Z","lastTransitionTime":"2025-11-30T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.405762 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.405825 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.405845 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.405874 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.405896 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:40Z","lastTransitionTime":"2025-11-30T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.509013 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.509093 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.509114 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.509154 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.509176 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:40Z","lastTransitionTime":"2025-11-30T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.521177 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.521250 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:40 crc kubenswrapper[4941]: E1130 06:47:40.521399 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:40 crc kubenswrapper[4941]: E1130 06:47:40.521549 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.612637 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.612709 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.612731 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.612766 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.612789 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:40Z","lastTransitionTime":"2025-11-30T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.717097 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.717184 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.717210 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.717245 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.717283 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:40Z","lastTransitionTime":"2025-11-30T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.827803 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.827863 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.827878 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.827901 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.827918 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:40Z","lastTransitionTime":"2025-11-30T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.931138 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.931225 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.931249 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.931288 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:40 crc kubenswrapper[4941]: I1130 06:47:40.931317 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:40Z","lastTransitionTime":"2025-11-30T06:47:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.034392 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.034456 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.034479 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.034505 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.034524 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:41Z","lastTransitionTime":"2025-11-30T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.137694 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.137753 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.137771 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.137795 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.137815 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:41Z","lastTransitionTime":"2025-11-30T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.240926 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.241035 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.241063 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.241101 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.241131 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:41Z","lastTransitionTime":"2025-11-30T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.344706 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.344782 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.344802 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.344853 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.344874 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:41Z","lastTransitionTime":"2025-11-30T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.447471 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.447575 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.447597 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.447622 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.447640 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:41Z","lastTransitionTime":"2025-11-30T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.521320 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.521488 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:41 crc kubenswrapper[4941]: E1130 06:47:41.521571 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:41 crc kubenswrapper[4941]: E1130 06:47:41.521777 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.550166 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.550222 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.550292 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.550407 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.550433 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:41Z","lastTransitionTime":"2025-11-30T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.653939 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.654062 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.654084 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.654108 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.654129 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:41Z","lastTransitionTime":"2025-11-30T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.757752 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.757840 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.757866 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.757898 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.757921 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:41Z","lastTransitionTime":"2025-11-30T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.861431 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.861497 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.861518 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.861545 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.861563 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:41Z","lastTransitionTime":"2025-11-30T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.965057 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.965109 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.965121 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.965145 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:41 crc kubenswrapper[4941]: I1130 06:47:41.965160 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:41Z","lastTransitionTime":"2025-11-30T06:47:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.068319 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.068408 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.068428 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.068454 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.068471 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:42Z","lastTransitionTime":"2025-11-30T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.171934 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.171999 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.172019 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.172045 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.172057 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:42Z","lastTransitionTime":"2025-11-30T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.274635 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.274684 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.274695 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.274720 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.274733 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:42Z","lastTransitionTime":"2025-11-30T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.377304 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.377385 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.377402 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.377428 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.377446 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:42Z","lastTransitionTime":"2025-11-30T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.479710 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.480126 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.480145 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.480176 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.480204 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:42Z","lastTransitionTime":"2025-11-30T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.521241 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.521269 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.521514 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.521648 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.539717 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.539929 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.539903626 +0000 UTC m=+147.308075235 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.539988 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.540032 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.540062 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.540096 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " 
pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.540173 4941 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.540224 4941 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.540245 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.540219536 +0000 UTC m=+147.308391145 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.540245 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.540267 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.540280 4941 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.540313 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.540281718 +0000 UTC m=+147.308453367 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.540373 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.54035622 +0000 UTC m=+147.308527869 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.540449 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.540485 4941 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.540507 4941 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:47:42 crc kubenswrapper[4941]: E1130 06:47:42.540574 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.540550846 +0000 UTC m=+147.308722495 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.582914 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.582963 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.582978 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.582995 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.583007 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:42Z","lastTransitionTime":"2025-11-30T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
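The "(durationBeforeRetry 1m4s)" figure in the nestedpendingoperations entries above is consistent with a doubling backoff: 1m4s is 64s, which is exactly what an initial 500ms delay reaches after seven doublings. The constants in this sketch are assumptions inferred from that arithmetic, not a quotation of kubelet source.

// backoff.go - sketch of the doubling retry delay visible in the
// nestedpendingoperations entries above. With an assumed 500ms initial
// delay and an assumed ~2m cap, the eighth attempt waits 1m4s, matching
// the logged durationBeforeRetry.
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond          // assumed initial delay
	ceiling := 2*time.Minute + 2*time.Second // assumed cap
	for attempt := 1; attempt <= 9; attempt++ {
		fmt.Printf("attempt %d: wait %v before retry\n", attempt, delay)
		delay *= 2
		if delay > ceiling {
			delay = ceiling
		}
	}
}

The long delay explains why the same MountVolume.SetUp failures recur only about once a minute in the log rather than continuously.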
Has your network provider started?"} Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.685575 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.685670 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.685695 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.685724 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.685744 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:42Z","lastTransitionTime":"2025-11-30T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.788180 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.788242 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.788265 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.788295 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.788359 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:42Z","lastTransitionTime":"2025-11-30T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.891459 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.891528 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.891551 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.891574 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.891588 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:42Z","lastTransitionTime":"2025-11-30T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.995652 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.995718 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.995736 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.995761 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:42 crc kubenswrapper[4941]: I1130 06:47:42.995778 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:42Z","lastTransitionTime":"2025-11-30T06:47:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.099182 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.099270 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.099288 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.099316 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.099362 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:43Z","lastTransitionTime":"2025-11-30T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.202709 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.202776 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.202794 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.202819 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.202837 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:43Z","lastTransitionTime":"2025-11-30T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.221714 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.221792 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.221815 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.221854 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.221876 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:43Z","lastTransitionTime":"2025-11-30T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:43 crc kubenswrapper[4941]: E1130 06:47:43.243579 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.250080 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.250143 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
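The "Error updating node status, will retry" entry above is the clearest root-cause signal in this capture: every node status patch is rejected because the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves a certificate that expired on 2025-08-24T17:21:41Z, while the node's clock reads 2025-11-30. A minimal sketch of how one might confirm that from the host follows; it is an assumed diagnostic, not part of OpenShift, and the address is taken from the webhook error itself.

// certcheck.go - illustrative sketch that connects to the webhook
// endpoint rejected above and prints the serving certificate's validity
// window, confirming the "x509: certificate has expired" failure.
package main

import (
	"crypto/tls"
	"fmt"
	"os"
)

func main() {
	addr := "127.0.0.1:9743" // endpoint taken from the webhook error above
	conn, err := tls.Dial("tcp", addr, &tls.Config{
		// Skip verification deliberately: we want to read the
		// certificate even though it is expired.
		InsecureSkipVerify: true,
	})
	if err != nil {
		fmt.Printf("dial %s failed: %v\n", addr, err)
		os.Exit(1)
	}
	defer conn.Close()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%q notBefore=%s notAfter=%s\n",
			cert.Subject, cert.NotBefore, cert.NotAfter)
	}
}

Until that certificate is rotated, the kubelet keeps retrying the identical patch, which is why the same multi-kilobyte image list and error text repeat verbatim in the entries that follow.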
event="NodeHasNoDiskPressure" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.250168 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.250200 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.250219 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:43Z","lastTransitionTime":"2025-11-30T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:43 crc kubenswrapper[4941]: E1130 06:47:43.271526 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:43Z is after 2025-08-24T17:21:41Z"
Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.278105 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.278170 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.278193 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.278224 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.278247 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:43Z","lastTransitionTime":"2025-11-30T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:43 crc kubenswrapper[4941]: E1130 06:47:43.300416 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{...}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:43Z is after 2025-08-24T17:21:41Z"
Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.305892 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.305952 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
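The patch above is rejected because the node-identity webhook at https://127.0.0.1:9743 serves a certificate that expired on 2025-08-24T17:21:41Z while the node clock reads 2025-11-30. A minimal Go sketch for confirming the validity window from the host; the address is taken from the log line, everything else is illustrative rather than part of the kubelet:

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Endpoint copied from the failing webhook call above. InsecureSkipVerify
	// is deliberate: the goal is to read the validity window of whatever
	// certificate is served, not to validate the chain.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%v notBefore=%v notAfter=%v expired=%v\n",
			cert.Subject, cert.NotBefore, cert.NotAfter, now.After(cert.NotAfter))
	}
}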
event="NodeHasNoDiskPressure" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.305974 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.306005 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.306026 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:43Z","lastTransitionTime":"2025-11-30T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:43 crc kubenswrapper[4941]: E1130 06:47:43.326157 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.331221 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.331278 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.331304 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.331365 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.331391 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:43Z","lastTransitionTime":"2025-11-30T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:43 crc kubenswrapper[4941]: E1130 06:47:43.352923 4941 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"105cacd9-725d-42a6-8952-62e3f6b69189\\\",\\\"systemUUID\\\":\\\"f889fd33-4d25-4c91-a88f-22995726b30c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:43Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:43 crc kubenswrapper[4941]: E1130 06:47:43.353140 4941 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.355633 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
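The record above shows the kubelet exhausting its retry budget ("update node status exceeds retry count") while the same few failure signatures repeat throughout the capture. A throwaway Go sketch for quantifying them when triaging this journal; the signature strings are copied verbatim from the records above, the tool itself is just an assumption about how one might count them:

package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

func main() {
	// Pipe the journal excerpt in on stdin and count the recurring error
	// signatures seen in this log.
	signatures := []string{
		"Error updating node status",
		"certificate has expired",
		"no CNI configuration file",
	}
	counts := make(map[string]int)
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 64*1024), 16*1024*1024) // raw journal lines can be very long
	for sc.Scan() {
		for _, sig := range signatures {
			if strings.Contains(sc.Text(), sig) {
				counts[sig]++
			}
		}
	}
	for _, sig := range signatures {
		fmt.Printf("%6d  %s\n", counts[sig], sig)
	}
}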
event="NodeHasSufficientMemory" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.355714 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.355740 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.355772 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.355808 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:43Z","lastTransitionTime":"2025-11-30T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.459035 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.459083 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.459099 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.459123 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.459162 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:43Z","lastTransitionTime":"2025-11-30T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.521358 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.521456 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:43 crc kubenswrapper[4941]: E1130 06:47:43.521552 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:43 crc kubenswrapper[4941]: E1130 06:47:43.521663 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.561902 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.561976 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.561994 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.562027 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.562046 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:43Z","lastTransitionTime":"2025-11-30T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.665752 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.665812 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.665830 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.665853 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:43 crc kubenswrapper[4941]: I1130 06:47:43.665871 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:43Z","lastTransitionTime":"2025-11-30T06:47:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.397034 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.397512 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.397657 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.397788 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.397910 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:44Z","lastTransitionTime":"2025-11-30T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.501821 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.501884 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.501900 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.501927 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.501945 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:44Z","lastTransitionTime":"2025-11-30T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.521654 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.521747 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:47:44 crc kubenswrapper[4941]: E1130 06:47:44.521899 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:47:44 crc kubenswrapper[4941]: E1130 06:47:44.522183 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.605273 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.605363 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.605382 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.605407 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.605431 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:44Z","lastTransitionTime":"2025-11-30T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.708953 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.709030 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.709056 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.709088 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:44 crc kubenswrapper[4941]: I1130 06:47:44.709112 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:44Z","lastTransitionTime":"2025-11-30T06:47:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:45 crc kubenswrapper[4941]: I1130 06:47:45.521639 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:47:45 crc kubenswrapper[4941]: I1130 06:47:45.521699 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:47:45 crc kubenswrapper[4941]: E1130 06:47:45.521897 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:47:45 crc kubenswrapper[4941]: E1130 06:47:45.522070 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:47:46 crc kubenswrapper[4941]: I1130 06:47:46.521655 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:47:46 crc kubenswrapper[4941]: I1130 06:47:46.521728 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:47:46 crc kubenswrapper[4941]: E1130 06:47:46.521847 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:47:46 crc kubenswrapper[4941]: E1130 06:47:46.521973 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:47:47 crc kubenswrapper[4941]: I1130 06:47:47.521520 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:47:47 crc kubenswrapper[4941]: I1130 06:47:47.521594 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:47:47 crc kubenswrapper[4941]: E1130 06:47:47.521768 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:47:47 crc kubenswrapper[4941]: E1130 06:47:47.522195 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:47:48 crc kubenswrapper[4941]: I1130 06:47:48.521534 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:47:48 crc kubenswrapper[4941]: I1130 06:47:48.521562 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:47:48 crc kubenswrapper[4941]: E1130 06:47:48.521729 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:47:48 crc kubenswrapper[4941]: E1130 06:47:48.522014 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.521176 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:47:49 crc kubenswrapper[4941]: E1130 06:47:49.521403 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.521504 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:47:49 crc kubenswrapper[4941]: E1130 06:47:49.521796 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.545283 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d33b85c-bbee-41af-8b33-5fbd2a9f1366\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://545749088db52e53bffa5102e360b7375bbc67009648500c78ca3f7cff7840c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://137bc90add75a2547a2b06a1c89ec28f4663dce18a9d63c43f7ac06356e5881e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a008c5b415f0ed3afae7c959e0bc6d621f467ac7018f1a453ae8099fae7f7ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d83f204301337a2c81cd3735b7f301ecf6451d6046eebf151313712b0b781ad7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.564281 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a676d94-f6d4-42b2-9ea1-62ecda52e8b7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096a8eff1b601167d5c16fea534c438fe1fc135e61418626766c8d434b9afe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22c24e0218a3313854727773a84ff6021b749f39e764004422168b99fc67eb0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22c24e0218a3313854727773a84ff6021b749f39e764004422168b99fc67eb0b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.588807 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.588850 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.588863 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.588883 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.588897 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:49Z","lastTransitionTime":"2025-11-30T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.611712 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6217364-7317-4ee9-957e-9a1764ff0342\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f715a01b87235ae23a9658b188be608f40ab3f27
c302dfe23ecceacafd3bb36b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:37Z\\\",\\\"message\\\":\\\"d Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:37Z is after 2025-08-24T17:21:41Z]\\\\nI1130 06:47:37.618691 6963 lb_config.go:1031] Cluster endpoints for openshift-cluster-version/cluster-version-operator for network=default are: map[]\\\\nI1130 06:47:37.618704 6963 services_controller.go:451] Built service openshift-apiserver/api cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver/api_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver/api\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.37\\\\\\\", Port:443, Template:(*services.Te\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:47:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kdvkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-zntd2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.652497 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e42ab3a0-995b-4132-af38-66b45838b5b2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f72cffd42a26ac98694acd1faadd50dec365e46aaba89f11c2f7cf4f027368a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://448592a1f547e42c735706fc820f1d444d079de9e68f2d138746ca6e4c553c69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z8q2z\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5x2fq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.671454 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vzc7c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a2c22971-565b-44b0-9312-737c3931a558\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:47:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-30T06:47:28Z\\\",\\\"message\\\":\\\"2025-11-30T06:46:42+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ 
to /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f\\\\n2025-11-30T06:46:43+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_7e1d09f3-8bec-4bb3-9d2d-d14da1f2081f to /host/opt/cni/bin/\\\\n2025-11-30T06:46:43Z [verbose] multus-daemon started\\\\n2025-11-30T06:46:43Z [verbose] Readiness Indicator file check\\\\n2025-11-30T06:47:28Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:47:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mftnq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vzc7c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.690801 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1130 06:46:31.851652 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1130 06:46:31.853007 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-615262596/tls.crt::/tmp/serving-cert-615262596/tls.key\\\\\\\"\\\\nI1130 06:46:37.641204 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1130 06:46:37.649137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1130 06:46:37.649184 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1130 06:46:37.649256 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1130 06:46:37.649267 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1130 06:46:37.658177 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1130 06:46:37.658221 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1130 06:46:37.658229 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658242 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1130 06:46:37.658254 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1130 06:46:37.658262 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1130 06:46:37.658272 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1130 06:46:37.658280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1130 06:46:37.660689 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.691658 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.691701 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.691711 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.691732 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.691742 4941 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:49Z","lastTransitionTime":"2025-11-30T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.705147 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4cd96e3656bd218e5a3bbc10324145d05a75e1c87274e13350f19697572b2d92\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.717518 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.738204 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.752884 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a9f2308e538919ca18dc4675b202e0909be427fea07335627b36d482d4acaa3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.774914 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e44d4fe9e164d1a67b9aef03268ad847d89a9f51e4c196859ed8cfa942cba18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04ed3e0de6ef597c3f3f5bde34b29c46f9baf38257c48e4de272a106a105591d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.791989 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vv76k" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b02b1490-bb42-429b-8d83-592b38482a87\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29c4fa2a907f162290bce69f3d70d2c01ad33eed698fa68b1adf2df30c6e6c7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b2snb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vv76k\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.794464 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.794549 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.794559 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.794576 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.794587 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:49Z","lastTransitionTime":"2025-11-30T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.817526 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"84de4c25-bc18-4219-8bb0-063e3c81bc4b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://afbf53b573dcc7467bbdf678490cc227abc5dd89765e40b8896b7b99bc2b4091\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28b79c8384dd4ff0369ffb7fefab6c2d8fc6711fdf3e75b5c6204c1d9d8aee6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c98b8812b61b751baa2e0b37c485c3e8e1ccd9314228a4b456d82d9726d2ab1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43293ea156b7c394ed916e9bb2407bf3b84ebc887636b56561283d751b682230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://389f098363a2ac6cd13000848fbec8fcc9f8124ec060b6f6875c570aebaaeab5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://813da332ad7586030a90f8fce3a25e3f0f73b9cff17160f42ce3c7093ce71c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813da332ad7586030a90f8fce3a25e3f0f73b9cff17160f42ce3c7093ce71c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d9fe38dceb78e63deb66ed43997788485dadfbbf2c7bd0a0348ef09fae9df27\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d9fe38dceb78e63deb66ed43997788485dadfbbf2c7bd0a0348ef09fae9df27\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b30d92b344231d159b55db9d12277a21a05c707da992a233fdef76c57dd389db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b30d92b344231d159b55db9d12277a21a05c707da992a233fdef76c57dd389db\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z"
Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.848090 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.863397 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-sm9jf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"096a351c-31d5-4186-8833-cf6693f30cc7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://34d33e512cdd80f94403d657e1540b3a287c693e2d83f44e270c4763df741ac6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d8mw6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-sm9jf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z"
Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.887981 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eab361f-8591-4bbd-8dce-a51a1b95af2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21e7d39a49b73204e7a01f3a9ea7897899093041ade27b4b5f26d506ea0e09e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://21983902fc03637dee1a5d1dc9222d82b8e58dd59bed4f05a8c7b97fd8d969f1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4af046ef6c9372f6def7d7698e9513905fd27f52082038fc57a61a43dff75c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94f487399c89680d41a6a1d8c0f85384d5bb56a83cb5b8dd01d4953ee87e148e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e7292354446f310863df8685f8a15c401ca6d3fedc5fe15f1f83bda9dadd976\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d3094911ca7cec4fdb9d9539f8966b11f5c2a931d10541386bb8909284b9d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3145b76a86707f2a6c23060e72b4dddb3a10d074b8735aad97cffa3b1a4ce39a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-30T06:46:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-30T06:46:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z2f8k\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:42Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zr2rg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z"
Nov 30 06:47:49 crc kubenswrapper[4941]: 
I1130 06:47:49.897182 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.897288 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.897308 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.897381 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.897401 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:49Z","lastTransitionTime":"2025-11-30T06:47:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.906200 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ba34d142-c6e9-45bd-93a4-cf8e15558381\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gsr5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:56Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-vwfsk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.928801 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f17c1746-8992-477f-8058-2b0470e1d075\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1287956830a2489697aec40860a5285f07402e5bc06fc946533820afd53eff3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c444ccbc5439eca2bba16552f109c81e3bf22cae9984e55f92f98c8b746a819f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9b587cd74c92787a3ff15e2c9897d42848b1dc12799e6627450c2c7c444360b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:19Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:49 crc kubenswrapper[4941]: I1130 06:47:49.949004 4941 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d39a3a2-8387-4108-aad6-3bfd59ad0018\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-30T06:46:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2d7576def0a9a22adf9449aa6d5b794d1fd7a50c81c6fbc1b19003fe4085249\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cda0f3c230f1b33789
e50715125b38f6f97149fbdadc5013f77463f8da628363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-30T06:46:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b8mtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-30T06:46:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5pscg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-30T06:47:49Z is after 2025-08-24T17:21:41Z" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.000537 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.000813 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.000886 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.000987 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.001051 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:50Z","lastTransitionTime":"2025-11-30T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.104352 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.104398 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.104408 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.104426 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.104439 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:50Z","lastTransitionTime":"2025-11-30T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.207833 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.207867 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.207877 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.207892 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.207902 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:50Z","lastTransitionTime":"2025-11-30T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.311531 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.311580 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.311597 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.311621 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.311639 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:50Z","lastTransitionTime":"2025-11-30T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.415253 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.415353 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.415384 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.415417 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.415440 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:50Z","lastTransitionTime":"2025-11-30T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.519196 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.519296 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.519316 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.519375 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.519397 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:50Z","lastTransitionTime":"2025-11-30T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.521487 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.521507 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:50 crc kubenswrapper[4941]: E1130 06:47:50.521709 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:50 crc kubenswrapper[4941]: E1130 06:47:50.521866 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.623431 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.623548 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.623577 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.623615 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.623643 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:50Z","lastTransitionTime":"2025-11-30T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.727449 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.727545 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.727569 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.727599 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.727618 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:50Z","lastTransitionTime":"2025-11-30T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.830960 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.831222 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.831290 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.831390 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.831475 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:50Z","lastTransitionTime":"2025-11-30T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.934587 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.934629 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.934641 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.934660 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:50 crc kubenswrapper[4941]: I1130 06:47:50.934673 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:50Z","lastTransitionTime":"2025-11-30T06:47:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.037915 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.037978 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.038002 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.038033 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.038055 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:51Z","lastTransitionTime":"2025-11-30T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.141174 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.141560 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.141711 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.141814 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.141903 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:51Z","lastTransitionTime":"2025-11-30T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.245265 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.245356 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.245374 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.245398 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.245414 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:51Z","lastTransitionTime":"2025-11-30T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.347630 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.347661 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.347670 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.347684 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.347693 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:51Z","lastTransitionTime":"2025-11-30T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.451878 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.451932 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.451947 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.451967 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.451980 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:51Z","lastTransitionTime":"2025-11-30T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.521195 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:47:51 crc kubenswrapper[4941]: E1130 06:47:51.521482 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.521659 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:47:51 crc kubenswrapper[4941]: E1130 06:47:51.521989 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
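The repeated "KubeletNotReady ... no CNI configuration file in /etc/kubernetes/cni/net.d/" condition above is the container runtime's network-readiness probe failing: until at least one CNI configuration file exists in that directory, the runtime reports NetworkReady=false and the kubelet keeps the node NotReady and refuses to create pod sandboxes. A minimal sketch of that directory check (the standalone program and the file name cnicheck.go are illustrative, not part of the kubelet; the directory path is the one named in the log):

    // cnicheck.go: look for CNI network configs the way the runtime does,
    // accepting *.conf, *.conflist, and *.json files in the conf directory.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        dir := "/etc/kubernetes/cni/net.d" // directory named in the log above
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Println("cannot read CNI conf dir:", err)
            os.Exit(1)
        }
        found := false
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                found = true
                fmt.Println("found CNI config:", filepath.Join(dir, e.Name()))
            }
        }
        if !found {
            fmt.Println("no CNI configuration file in", dir, "- network plugin not ready")
        }
    }

In this cluster the config file is normally written by the OVN-Kubernetes node pod (ovnkube-node-zntd2 above), so these errors would be expected to clear on their own once that pod stops crash-looping.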
pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.744619 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.744710 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.744733 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.744771 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.744807 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:51Z","lastTransitionTime":"2025-11-30T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.848160 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.848199 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.848208 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.848226 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.848236 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:51Z","lastTransitionTime":"2025-11-30T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.951597 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.951668 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.951687 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.951712 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:51 crc kubenswrapper[4941]: I1130 06:47:51.951730 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:51Z","lastTransitionTime":"2025-11-30T06:47:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.055288 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.055358 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.055368 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.055385 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.055396 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:52Z","lastTransitionTime":"2025-11-30T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.158418 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.158469 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.158483 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.158501 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.158514 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:52Z","lastTransitionTime":"2025-11-30T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.261549 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.261591 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.261600 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.261621 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.261634 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:52Z","lastTransitionTime":"2025-11-30T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.364870 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.364936 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.364954 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.364986 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.365007 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:52Z","lastTransitionTime":"2025-11-30T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.468542 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.468609 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.468628 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.468657 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.468692 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:52Z","lastTransitionTime":"2025-11-30T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
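Each "Node became not ready" entry above is the kubelet writing the node's Ready condition (setters.go) with reason KubeletNotReady; the same condition is visible from the API side. A sketch of reading it with client-go (the kubeconfig path is an assumption; the node name "crc" is taken from the log):

    // nodeready.go: print the Ready condition the kubelet is recording above.
    package main

    import (
        "context"
        "fmt"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config") // illustrative path
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        node, err := cs.CoreV1().Nodes().Get(context.TODO(), "crc", metav1.GetOptions{})
        if err != nil {
            panic(err)
        }
        for _, c := range node.Status.Conditions {
            if c.Type == corev1.NodeReady {
                fmt.Printf("Ready=%s reason=%s message=%q\n", c.Status, c.Reason, c.Message)
            }
        }
    }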
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.521251 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.521308 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:47:52 crc kubenswrapper[4941]: E1130 06:47:52.521614 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:47:52 crc kubenswrapper[4941]: E1130 06:47:52.522167 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.522316 4941 scope.go:117] "RemoveContainer" containerID="f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"
Nov 30 06:47:52 crc kubenswrapper[4941]: E1130 06:47:52.522485 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.571886 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.571953 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.571973 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.572001 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.572021 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:52Z","lastTransitionTime":"2025-11-30T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.675779 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.675857 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.675882 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.675919 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.675945 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:52Z","lastTransitionTime":"2025-11-30T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.778962 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.779012 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.779021 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.779043 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.779057 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:52Z","lastTransitionTime":"2025-11-30T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.882694 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.882764 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.882783 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.882812 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.882831 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:52Z","lastTransitionTime":"2025-11-30T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.986252 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.986370 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.986391 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.986426 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:52 crc kubenswrapper[4941]: I1130 06:47:52.986446 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:52Z","lastTransitionTime":"2025-11-30T06:47:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.090253 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.090318 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.090366 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.090394 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.090414 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:53Z","lastTransitionTime":"2025-11-30T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.194366 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.194451 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.194471 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.194506 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.194525 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:53Z","lastTransitionTime":"2025-11-30T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
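The "back-off 40s restarting failed container=ovnkube-controller" error above reflects the kubelet's restart back-off for crash-looping containers: the delay starts at 10s and doubles on each consecutive failure, capped at 5m, so 40s is consistent with a third consecutive failed restart. A sketch of that schedule (the values are the upstream kubelet defaults, not read from this log):

    // backoff.go: the kubelet's container restart back-off schedule.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay := 10 * time.Second   // initial container back-off
        maxDelay := 5 * time.Minute // cap (MaxContainerBackOff upstream)
        for failure := 1; failure <= 7; failure++ {
            fmt.Printf("failure %d: back-off %s\n", failure, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }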
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.297492 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.297562 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.297581 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.297610 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.297628 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:53Z","lastTransitionTime":"2025-11-30T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.401402 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.401467 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.401489 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.401516 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.401537 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:53Z","lastTransitionTime":"2025-11-30T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.505216 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.505272 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.505285 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.505306 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.505320 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:53Z","lastTransitionTime":"2025-11-30T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.521616 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.521687 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:47:53 crc kubenswrapper[4941]: E1130 06:47:53.521810 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:47:53 crc kubenswrapper[4941]: E1130 06:47:53.521906 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.523926 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.523974 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.523992 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.524014 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.524031 4941 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-30T06:47:53Z","lastTransitionTime":"2025-11-30T06:47:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.590304 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp"]
Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.590970 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp"
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.593483 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.594685 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.597821 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.598450 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.644657 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-sm9jf" podStartSLOduration=72.644618525 podStartE2EDuration="1m12.644618525s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:47:53.640872958 +0000 UTC m=+94.409044577" watchObservedRunningTime="2025-11-30 06:47:53.644618525 +0000 UTC m=+94.412790164" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.661229 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/aa501ae8-390d-4880-8316-22ff374427b7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.661321 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/aa501ae8-390d-4880-8316-22ff374427b7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.661425 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/aa501ae8-390d-4880-8316-22ff374427b7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.661461 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa501ae8-390d-4880-8316-22ff374427b7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.661610 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa501ae8-390d-4880-8316-22ff374427b7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: 
\"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.692065 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-zr2rg" podStartSLOduration=72.692032527 podStartE2EDuration="1m12.692032527s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:47:53.673434556 +0000 UTC m=+94.441606185" watchObservedRunningTime="2025-11-30 06:47:53.692032527 +0000 UTC m=+94.460204166" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.741596 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=15.741576896 podStartE2EDuration="15.741576896s" podCreationTimestamp="2025-11-30 06:47:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:47:53.739775309 +0000 UTC m=+94.507946968" watchObservedRunningTime="2025-11-30 06:47:53.741576896 +0000 UTC m=+94.509748525" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.758482 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podStartSLOduration=72.758451903 podStartE2EDuration="1m12.758451903s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:47:53.758141993 +0000 UTC m=+94.526313622" watchObservedRunningTime="2025-11-30 06:47:53.758451903 +0000 UTC m=+94.526623532" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.762780 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa501ae8-390d-4880-8316-22ff374427b7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.762878 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/aa501ae8-390d-4880-8316-22ff374427b7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.762922 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/aa501ae8-390d-4880-8316-22ff374427b7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.762951 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa501ae8-390d-4880-8316-22ff374427b7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.762986 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/aa501ae8-390d-4880-8316-22ff374427b7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.763119 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/aa501ae8-390d-4880-8316-22ff374427b7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.763185 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/aa501ae8-390d-4880-8316-22ff374427b7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.764263 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/aa501ae8-390d-4880-8316-22ff374427b7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.781349 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa501ae8-390d-4880-8316-22ff374427b7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.789092 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa501ae8-390d-4880-8316-22ff374427b7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-45hgp\" (UID: \"aa501ae8-390d-4880-8316-22ff374427b7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.795668 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=17.795648455 podStartE2EDuration="17.795648455s" podCreationTimestamp="2025-11-30 06:47:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:47:53.795390878 +0000 UTC m=+94.563562497" watchObservedRunningTime="2025-11-30 06:47:53.795648455 +0000 UTC m=+94.563820084" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.795890 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=73.795880782 podStartE2EDuration="1m13.795880782s" podCreationTimestamp="2025-11-30 06:46:40 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:47:53.780892375 +0000 UTC m=+94.549064004" watchObservedRunningTime="2025-11-30 06:47:53.795880782 +0000 UTC m=+94.564052411" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.856151 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5x2fq" podStartSLOduration=72.856124536 podStartE2EDuration="1m12.856124536s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:47:53.854944088 +0000 UTC m=+94.623115727" watchObservedRunningTime="2025-11-30 06:47:53.856124536 +0000 UTC m=+94.624296165" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.876160 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=44.876133991 podStartE2EDuration="44.876133991s" podCreationTimestamp="2025-11-30 06:47:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:47:53.87351547 +0000 UTC m=+94.641687089" watchObservedRunningTime="2025-11-30 06:47:53.876133991 +0000 UTC m=+94.644305610" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.910673 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" Nov 30 06:47:53 crc kubenswrapper[4941]: W1130 06:47:53.932443 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa501ae8_390d_4880_8316_22ff374427b7.slice/crio-f1b72bb86f9e6359916c81a4cdca6458bf86550b5b7976644de02271b00517f7 WatchSource:0}: Error finding container f1b72bb86f9e6359916c81a4cdca6458bf86550b5b7976644de02271b00517f7: Status 404 returned error can't find the container with id f1b72bb86f9e6359916c81a4cdca6458bf86550b5b7976644de02271b00517f7 Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.977887 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-vv76k" podStartSLOduration=72.97786283 podStartE2EDuration="1m12.97786283s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:47:53.976767376 +0000 UTC m=+94.744939025" watchObservedRunningTime="2025-11-30 06:47:53.97786283 +0000 UTC m=+94.746034439" Nov 30 06:47:53 crc kubenswrapper[4941]: I1130 06:47:53.998174 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-vzc7c" podStartSLOduration=72.998150314 podStartE2EDuration="1m12.998150314s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:47:53.997769212 +0000 UTC m=+94.765940841" watchObservedRunningTime="2025-11-30 06:47:53.998150314 +0000 UTC m=+94.766321933" Nov 30 06:47:54 crc kubenswrapper[4941]: I1130 06:47:54.023083 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=76.023064233 
podStartE2EDuration="1m16.023064233s" podCreationTimestamp="2025-11-30 06:46:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:47:54.021021929 +0000 UTC m=+94.789193538" watchObservedRunningTime="2025-11-30 06:47:54.023064233 +0000 UTC m=+94.791235842" Nov 30 06:47:54 crc kubenswrapper[4941]: I1130 06:47:54.075028 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" event={"ID":"aa501ae8-390d-4880-8316-22ff374427b7","Type":"ContainerStarted","Data":"f1b72bb86f9e6359916c81a4cdca6458bf86550b5b7976644de02271b00517f7"} Nov 30 06:47:54 crc kubenswrapper[4941]: I1130 06:47:54.521446 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:54 crc kubenswrapper[4941]: E1130 06:47:54.522167 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:54 crc kubenswrapper[4941]: I1130 06:47:54.521446 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:54 crc kubenswrapper[4941]: E1130 06:47:54.522432 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:55 crc kubenswrapper[4941]: I1130 06:47:55.081357 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" event={"ID":"aa501ae8-390d-4880-8316-22ff374427b7","Type":"ContainerStarted","Data":"7cd4559788d455b9c4f350d660449a9646b5d36c5ca7d28c14c3621f42beeb2d"} Nov 30 06:47:55 crc kubenswrapper[4941]: I1130 06:47:55.521698 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:55 crc kubenswrapper[4941]: I1130 06:47:55.521707 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:55 crc kubenswrapper[4941]: E1130 06:47:55.521932 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:55 crc kubenswrapper[4941]: E1130 06:47:55.522254 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:56 crc kubenswrapper[4941]: I1130 06:47:56.520705 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:56 crc kubenswrapper[4941]: I1130 06:47:56.520809 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:56 crc kubenswrapper[4941]: E1130 06:47:56.520948 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:56 crc kubenswrapper[4941]: E1130 06:47:56.521194 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:57 crc kubenswrapper[4941]: I1130 06:47:57.520631 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:57 crc kubenswrapper[4941]: E1130 06:47:57.520858 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:47:57 crc kubenswrapper[4941]: I1130 06:47:57.520934 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:57 crc kubenswrapper[4941]: E1130 06:47:57.521181 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:58 crc kubenswrapper[4941]: I1130 06:47:58.520835 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:47:58 crc kubenswrapper[4941]: I1130 06:47:58.520963 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:47:58 crc kubenswrapper[4941]: E1130 06:47:58.521031 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:47:58 crc kubenswrapper[4941]: E1130 06:47:58.521211 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:47:59 crc kubenswrapper[4941]: I1130 06:47:59.520961 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:47:59 crc kubenswrapper[4941]: I1130 06:47:59.522246 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:47:59 crc kubenswrapper[4941]: E1130 06:47:59.522480 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:47:59 crc kubenswrapper[4941]: E1130 06:47:59.522723 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:48:00 crc kubenswrapper[4941]: I1130 06:48:00.238617 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:48:00 crc kubenswrapper[4941]: E1130 06:48:00.238841 4941 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 30 06:48:00 crc kubenswrapper[4941]: E1130 06:48:00.238916 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs podName:ba34d142-c6e9-45bd-93a4-cf8e15558381 nodeName:}" failed. 
No retries permitted until 2025-11-30 06:49:04.238895997 +0000 UTC m=+165.007067616 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs") pod "network-metrics-daemon-vwfsk" (UID: "ba34d142-c6e9-45bd-93a4-cf8e15558381") : object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 30 06:48:00 crc kubenswrapper[4941]: I1130 06:48:00.520743 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:00 crc kubenswrapper[4941]: E1130 06:48:00.520897 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:00 crc kubenswrapper[4941]: I1130 06:48:00.521290 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:00 crc kubenswrapper[4941]: E1130 06:48:00.521375 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:01 crc kubenswrapper[4941]: I1130 06:48:01.520815 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:01 crc kubenswrapper[4941]: I1130 06:48:01.520890 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:01 crc kubenswrapper[4941]: E1130 06:48:01.521081 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:01 crc kubenswrapper[4941]: E1130 06:48:01.521292 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:02 crc kubenswrapper[4941]: I1130 06:48:02.521789 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:02 crc kubenswrapper[4941]: I1130 06:48:02.521863 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:02 crc kubenswrapper[4941]: E1130 06:48:02.522068 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:02 crc kubenswrapper[4941]: E1130 06:48:02.522391 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:03 crc kubenswrapper[4941]: I1130 06:48:03.521681 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:03 crc kubenswrapper[4941]: I1130 06:48:03.522412 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:03 crc kubenswrapper[4941]: E1130 06:48:03.522564 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:03 crc kubenswrapper[4941]: E1130 06:48:03.522696 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:04 crc kubenswrapper[4941]: I1130 06:48:04.521160 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:04 crc kubenswrapper[4941]: E1130 06:48:04.521652 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:04 crc kubenswrapper[4941]: I1130 06:48:04.521166 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:04 crc kubenswrapper[4941]: E1130 06:48:04.521995 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:05 crc kubenswrapper[4941]: I1130 06:48:05.521485 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:05 crc kubenswrapper[4941]: I1130 06:48:05.521532 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:05 crc kubenswrapper[4941]: E1130 06:48:05.521748 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:05 crc kubenswrapper[4941]: E1130 06:48:05.521823 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:06 crc kubenswrapper[4941]: I1130 06:48:06.521544 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:06 crc kubenswrapper[4941]: I1130 06:48:06.521633 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:06 crc kubenswrapper[4941]: E1130 06:48:06.521782 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:06 crc kubenswrapper[4941]: E1130 06:48:06.522013 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:07 crc kubenswrapper[4941]: I1130 06:48:07.521730 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:07 crc kubenswrapper[4941]: I1130 06:48:07.521842 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:07 crc kubenswrapper[4941]: E1130 06:48:07.522525 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:07 crc kubenswrapper[4941]: E1130 06:48:07.522736 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:07 crc kubenswrapper[4941]: I1130 06:48:07.523078 4941 scope.go:117] "RemoveContainer" containerID="f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"
Nov 30 06:48:07 crc kubenswrapper[4941]: E1130 06:48:07.523401 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-zntd2_openshift-ovn-kubernetes(a6217364-7317-4ee9-957e-9a1764ff0342)\"" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342"
Nov 30 06:48:08 crc kubenswrapper[4941]: I1130 06:48:08.521217 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:08 crc kubenswrapper[4941]: I1130 06:48:08.521280 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:08 crc kubenswrapper[4941]: E1130 06:48:08.521509 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:08 crc kubenswrapper[4941]: E1130 06:48:08.521683 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:09 crc kubenswrapper[4941]: I1130 06:48:09.523203 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:09 crc kubenswrapper[4941]: E1130 06:48:09.523402 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:09 crc kubenswrapper[4941]: I1130 06:48:09.523728 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:09 crc kubenswrapper[4941]: E1130 06:48:09.523835 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:10 crc kubenswrapper[4941]: I1130 06:48:10.521518 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:10 crc kubenswrapper[4941]: I1130 06:48:10.521700 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:10 crc kubenswrapper[4941]: E1130 06:48:10.522639 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:10 crc kubenswrapper[4941]: E1130 06:48:10.522900 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:11 crc kubenswrapper[4941]: I1130 06:48:11.521035 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:11 crc kubenswrapper[4941]: I1130 06:48:11.521080 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:11 crc kubenswrapper[4941]: E1130 06:48:11.521238 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:11 crc kubenswrapper[4941]: E1130 06:48:11.521405 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:12 crc kubenswrapper[4941]: I1130 06:48:12.521495 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:12 crc kubenswrapper[4941]: I1130 06:48:12.521637 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:12 crc kubenswrapper[4941]: E1130 06:48:12.521651 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:12 crc kubenswrapper[4941]: E1130 06:48:12.521866 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:13 crc kubenswrapper[4941]: I1130 06:48:13.521455 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:13 crc kubenswrapper[4941]: I1130 06:48:13.521470 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:13 crc kubenswrapper[4941]: E1130 06:48:13.521686 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:13 crc kubenswrapper[4941]: E1130 06:48:13.521916 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:14 crc kubenswrapper[4941]: I1130 06:48:14.520932 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:14 crc kubenswrapper[4941]: I1130 06:48:14.520956 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:14 crc kubenswrapper[4941]: E1130 06:48:14.521117 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:14 crc kubenswrapper[4941]: E1130 06:48:14.521239 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:15 crc kubenswrapper[4941]: I1130 06:48:15.157240 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vzc7c_a2c22971-565b-44b0-9312-737c3931a558/kube-multus/1.log"
Nov 30 06:48:15 crc kubenswrapper[4941]: I1130 06:48:15.159105 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vzc7c_a2c22971-565b-44b0-9312-737c3931a558/kube-multus/0.log"
Nov 30 06:48:15 crc kubenswrapper[4941]: I1130 06:48:15.159445 4941 generic.go:334] "Generic (PLEG): container finished" podID="a2c22971-565b-44b0-9312-737c3931a558" containerID="72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4" exitCode=1
Nov 30 06:48:15 crc kubenswrapper[4941]: I1130 06:48:15.159695 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vzc7c" event={"ID":"a2c22971-565b-44b0-9312-737c3931a558","Type":"ContainerDied","Data":"72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4"}
Nov 30 06:48:15 crc kubenswrapper[4941]: I1130 06:48:15.159926 4941 scope.go:117] "RemoveContainer" containerID="475d4e04eadce6388f8ad4d9a01cc026f022a5303ca48943330ec0d04658baa4"
Nov 30 06:48:15 crc kubenswrapper[4941]: I1130 06:48:15.161542 4941 scope.go:117] "RemoveContainer" containerID="72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4"
Nov 30 06:48:15 crc kubenswrapper[4941]: E1130 06:48:15.162298 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-vzc7c_openshift-multus(a2c22971-565b-44b0-9312-737c3931a558)\"" pod="openshift-multus/multus-vzc7c" podUID="a2c22971-565b-44b0-9312-737c3931a558"
Nov 30 06:48:15 crc kubenswrapper[4941]: I1130 06:48:15.195122 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-45hgp" podStartSLOduration=94.19509162 podStartE2EDuration="1m34.19509162s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:47:55.106594758 +0000 UTC m=+95.874766457" watchObservedRunningTime="2025-11-30 06:48:15.19509162 +0000 UTC m=+115.963263259"
Nov 30 06:48:15 crc kubenswrapper[4941]: I1130 06:48:15.521452 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:15 crc kubenswrapper[4941]: E1130 06:48:15.521684 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:15 crc kubenswrapper[4941]: I1130 06:48:15.522009 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:15 crc kubenswrapper[4941]: E1130 06:48:15.522148 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:16 crc kubenswrapper[4941]: I1130 06:48:16.163876 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vzc7c_a2c22971-565b-44b0-9312-737c3931a558/kube-multus/1.log"
Nov 30 06:48:16 crc kubenswrapper[4941]: I1130 06:48:16.520942 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:16 crc kubenswrapper[4941]: I1130 06:48:16.521050 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:16 crc kubenswrapper[4941]: E1130 06:48:16.521109 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:16 crc kubenswrapper[4941]: E1130 06:48:16.521259 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:17 crc kubenswrapper[4941]: I1130 06:48:17.521008 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:17 crc kubenswrapper[4941]: E1130 06:48:17.521317 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:17 crc kubenswrapper[4941]: I1130 06:48:17.521464 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:17 crc kubenswrapper[4941]: E1130 06:48:17.521741 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:18 crc kubenswrapper[4941]: I1130 06:48:18.520666 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:18 crc kubenswrapper[4941]: I1130 06:48:18.520750 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:18 crc kubenswrapper[4941]: E1130 06:48:18.520855 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:18 crc kubenswrapper[4941]: E1130 06:48:18.521100 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:19 crc kubenswrapper[4941]: I1130 06:48:19.522062 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:19 crc kubenswrapper[4941]: E1130 06:48:19.522282 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:19 crc kubenswrapper[4941]: I1130 06:48:19.522602 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:19 crc kubenswrapper[4941]: I1130 06:48:19.523277 4941 scope.go:117] "RemoveContainer" containerID="f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"
Nov 30 06:48:19 crc kubenswrapper[4941]: E1130 06:48:19.523779 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:19 crc kubenswrapper[4941]: E1130 06:48:19.557229 4941 kubelet_node_status.go:497] "Node not becoming ready in time after startup"
Nov 30 06:48:19 crc kubenswrapper[4941]: E1130 06:48:19.613766 4941 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Nov 30 06:48:20 crc kubenswrapper[4941]: I1130 06:48:20.186861 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/3.log"
Nov 30 06:48:20 crc kubenswrapper[4941]: I1130 06:48:20.190749 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerStarted","Data":"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109"}
Nov 30 06:48:20 crc kubenswrapper[4941]: I1130 06:48:20.192838 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2"
Nov 30 06:48:20 crc kubenswrapper[4941]: I1130 06:48:20.229878 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podStartSLOduration=99.229855231 podStartE2EDuration="1m39.229855231s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:20.228768648 +0000 UTC m=+120.996940297" watchObservedRunningTime="2025-11-30 06:48:20.229855231 +0000 UTC m=+120.998026860"
Nov 30 06:48:20 crc kubenswrapper[4941]: I1130 06:48:20.493498 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-vwfsk"]
Nov 30 06:48:20 crc kubenswrapper[4941]: I1130 06:48:20.493708 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:20 crc kubenswrapper[4941]: E1130 06:48:20.493866 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:20 crc kubenswrapper[4941]: I1130 06:48:20.521224 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:20 crc kubenswrapper[4941]: I1130 06:48:20.521283 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:20 crc kubenswrapper[4941]: E1130 06:48:20.521460 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:20 crc kubenswrapper[4941]: E1130 06:48:20.521762 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:21 crc kubenswrapper[4941]: I1130 06:48:21.520923 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:21 crc kubenswrapper[4941]: E1130 06:48:21.521114 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:22 crc kubenswrapper[4941]: I1130 06:48:22.521088 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:22 crc kubenswrapper[4941]: E1130 06:48:22.521703 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:22 crc kubenswrapper[4941]: I1130 06:48:22.521397 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:22 crc kubenswrapper[4941]: I1130 06:48:22.521291 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:22 crc kubenswrapper[4941]: E1130 06:48:22.521809 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:22 crc kubenswrapper[4941]: E1130 06:48:22.522055 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:23 crc kubenswrapper[4941]: I1130 06:48:23.521655 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:23 crc kubenswrapper[4941]: E1130 06:48:23.521885 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:24 crc kubenswrapper[4941]: I1130 06:48:24.521305 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:24 crc kubenswrapper[4941]: I1130 06:48:24.521405 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:24 crc kubenswrapper[4941]: I1130 06:48:24.521406 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:24 crc kubenswrapper[4941]: E1130 06:48:24.521599 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:24 crc kubenswrapper[4941]: E1130 06:48:24.521957 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:24 crc kubenswrapper[4941]: E1130 06:48:24.522055 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:24 crc kubenswrapper[4941]: E1130 06:48:24.615494 4941 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Nov 30 06:48:25 crc kubenswrapper[4941]: I1130 06:48:25.521635 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:25 crc kubenswrapper[4941]: E1130 06:48:25.521863 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:48:26 crc kubenswrapper[4941]: I1130 06:48:26.521381 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:48:26 crc kubenswrapper[4941]: I1130 06:48:26.521465 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:48:26 crc kubenswrapper[4941]: E1130 06:48:26.521567 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:48:26 crc kubenswrapper[4941]: E1130 06:48:26.521680 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:48:26 crc kubenswrapper[4941]: I1130 06:48:26.521759 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:48:26 crc kubenswrapper[4941]: E1130 06:48:26.521848 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:48:27 crc kubenswrapper[4941]: I1130 06:48:27.521067 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:48:27 crc kubenswrapper[4941]: E1130 06:48:27.521389 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:48:28 crc kubenswrapper[4941]: I1130 06:48:28.520786 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:48:28 crc kubenswrapper[4941]: I1130 06:48:28.520812 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:48:28 crc kubenswrapper[4941]: I1130 06:48:28.520889 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:48:28 crc kubenswrapper[4941]: E1130 06:48:28.521262 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:48:28 crc kubenswrapper[4941]: E1130 06:48:28.521465 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:48:28 crc kubenswrapper[4941]: E1130 06:48:28.522507 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:48:28 crc kubenswrapper[4941]: I1130 06:48:28.522698 4941 scope.go:117] "RemoveContainer" containerID="72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4" Nov 30 06:48:29 crc kubenswrapper[4941]: I1130 06:48:29.228251 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vzc7c_a2c22971-565b-44b0-9312-737c3931a558/kube-multus/1.log" Nov 30 06:48:29 crc kubenswrapper[4941]: I1130 06:48:29.228362 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vzc7c" event={"ID":"a2c22971-565b-44b0-9312-737c3931a558","Type":"ContainerStarted","Data":"f4933ca1d46a47b00734deaa91d22c2cfee015918f3ff0d5764ac80ce7d6f0f1"} Nov 30 06:48:29 crc kubenswrapper[4941]: I1130 06:48:29.521105 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:48:29 crc kubenswrapper[4941]: E1130 06:48:29.523116 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:48:29 crc kubenswrapper[4941]: E1130 06:48:29.616468 4941 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 30 06:48:30 crc kubenswrapper[4941]: I1130 06:48:30.520801 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:48:30 crc kubenswrapper[4941]: I1130 06:48:30.520827 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:48:30 crc kubenswrapper[4941]: E1130 06:48:30.521059 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:48:30 crc kubenswrapper[4941]: I1130 06:48:30.520835 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:48:30 crc kubenswrapper[4941]: E1130 06:48:30.521213 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381" Nov 30 06:48:30 crc kubenswrapper[4941]: E1130 06:48:30.521390 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 30 06:48:31 crc kubenswrapper[4941]: I1130 06:48:31.521298 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:48:31 crc kubenswrapper[4941]: E1130 06:48:31.521614 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 30 06:48:32 crc kubenswrapper[4941]: I1130 06:48:32.521645 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:48:32 crc kubenswrapper[4941]: E1130 06:48:32.521839 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 30 06:48:32 crc kubenswrapper[4941]: I1130 06:48:32.522219 4941 util.go:30] "No sandbox for pod can be found. 
Nov 30 06:48:32 crc kubenswrapper[4941]: E1130 06:48:32.522361 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:32 crc kubenswrapper[4941]: I1130 06:48:32.522567 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:32 crc kubenswrapper[4941]: E1130 06:48:32.522666 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:33 crc kubenswrapper[4941]: I1130 06:48:33.521704 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:33 crc kubenswrapper[4941]: E1130 06:48:33.521945 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 30 06:48:34 crc kubenswrapper[4941]: I1130 06:48:34.521677 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:34 crc kubenswrapper[4941]: I1130 06:48:34.521751 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:34 crc kubenswrapper[4941]: I1130 06:48:34.521899 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:34 crc kubenswrapper[4941]: E1130 06:48:34.521911 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 30 06:48:34 crc kubenswrapper[4941]: E1130 06:48:34.522020 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-vwfsk" podUID="ba34d142-c6e9-45bd-93a4-cf8e15558381"
Nov 30 06:48:34 crc kubenswrapper[4941]: E1130 06:48:34.522115 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 30 06:48:35 crc kubenswrapper[4941]: I1130 06:48:35.521131 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:35 crc kubenswrapper[4941]: I1130 06:48:35.524738 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Nov 30 06:48:35 crc kubenswrapper[4941]: I1130 06:48:35.524768 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Nov 30 06:48:36 crc kubenswrapper[4941]: I1130 06:48:36.521256 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:36 crc kubenswrapper[4941]: I1130 06:48:36.521300 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk"
Nov 30 06:48:36 crc kubenswrapper[4941]: I1130 06:48:36.521316 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:36 crc kubenswrapper[4941]: I1130 06:48:36.524283 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Nov 30 06:48:36 crc kubenswrapper[4941]: I1130 06:48:36.524493 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Nov 30 06:48:36 crc kubenswrapper[4941]: I1130 06:48:36.524920 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Nov 30 06:48:36 crc kubenswrapper[4941]: I1130 06:48:36.529261 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.693584 4941 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.783857 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.789834 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.795467 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.802434 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.802658 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.804620 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.804999 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-526zm"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.805715 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-css84"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.806301 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-css84"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.805959 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.808075 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.808954 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.811889 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-mt5dk"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.812367 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-pkrwj"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.812744 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vhvpc"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.813625 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.814140 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.813506 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.813578 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.813588 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.815811 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.816109 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-gjx4m"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.816731 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-gjx4m"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.821611 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.822172 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.823551 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.823791 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.823958 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.824076 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.824269 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.824465 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.824572 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.824686 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-frzl6"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.824709 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.824018 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.824511 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.825659 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-frzl6"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.826252 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.827097 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.827180 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.828438 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.828781 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xrszt"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.829086 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.829215 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.829416 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.829742 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-vkq5m"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.833680 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krsrr"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.834255 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.834310 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.834495 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-vkq5m"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.835804 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.836624 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.837056 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.837821 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4qn2t"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.838807 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-4qn2t"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.864664 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.867905 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.869410 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.874943 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.875676 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.875715 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.875801 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.875874 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.875912 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.876496 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.876537 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.876808 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.877422 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.880139 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.880709 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.880916 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.888054 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.890970 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2"]
Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.889176 4941
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.889349 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.892203 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.893153 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/465a2899-647c-4144-8810-46a4a4e49909-images\") pod \"machine-api-operator-5694c8668f-mt5dk\" (UID: \"465a2899-647c-4144-8810-46a4a4e49909\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.894732 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqv8f\" (UniqueName: \"kubernetes.io/projected/465a2899-647c-4144-8810-46a4a4e49909-kube-api-access-nqv8f\") pod \"machine-api-operator-5694c8668f-mt5dk\" (UID: \"465a2899-647c-4144-8810-46a4a4e49909\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.894850 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-image-import-ca\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.894957 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.895061 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-config\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.895187 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f38e9faf-5fe2-49c8-8516-2b5f2766199e-etcd-client\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.895295 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 
crc kubenswrapper[4941]: I1130 06:48:44.895430 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.895539 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.895637 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.895737 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/843edada-6eb9-46da-ba98-05ccfcd4cb1b-serving-cert\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.895875 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f1969d8-c7f9-4c39-9727-e27a26020d46-serving-cert\") pod \"console-operator-58897d9998-gjx4m\" (UID: \"8f1969d8-c7f9-4c39-9727-e27a26020d46\") " pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.896012 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f38e9faf-5fe2-49c8-8516-2b5f2766199e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.896129 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.896281 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94a81839-4806-4112-8c30-fb70049d8296-serving-cert\") pod \"openshift-config-operator-7777fb866f-526zm\" (UID: \"94a81839-4806-4112-8c30-fb70049d8296\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.896421 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.896545 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/465a2899-647c-4144-8810-46a4a4e49909-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-mt5dk\" (UID: \"465a2899-647c-4144-8810-46a4a4e49909\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.899262 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f38e9faf-5fe2-49c8-8516-2b5f2766199e-audit-policies\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.899370 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f1969d8-c7f9-4c39-9727-e27a26020d46-config\") pod \"console-operator-58897d9998-gjx4m\" (UID: \"8f1969d8-c7f9-4c39-9727-e27a26020d46\") " pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.899491 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.899580 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzvtl\" (UniqueName: \"kubernetes.io/projected/8f1969d8-c7f9-4c39-9727-e27a26020d46-kube-api-access-bzvtl\") pod \"console-operator-58897d9998-gjx4m\" (UID: \"8f1969d8-c7f9-4c39-9727-e27a26020d46\") " pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.899661 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7q5m\" (UniqueName: \"kubernetes.io/projected/ab7371ad-5001-4223-84f1-c7d55ce3c5f3-kube-api-access-d7q5m\") pod \"dns-operator-744455d44c-4qn2t\" (UID: \"ab7371ad-5001-4223-84f1-c7d55ce3c5f3\") " pod="openshift-dns-operator/dns-operator-744455d44c-4qn2t" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.899754 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/179be37e-b722-4f7b-ab6a-88fb1407c2e7-audit-dir\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " 
pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.899828 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6c62f053-996e-44b6-9a65-3d7f292b6cef-audit-dir\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.899907 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-audit-policies\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.899978 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-config\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.900053 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f38e9faf-5fe2-49c8-8516-2b5f2766199e-serving-cert\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.900125 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f38e9faf-5fe2-49c8-8516-2b5f2766199e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.900199 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-service-ca-bundle\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.900268 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/179be37e-b722-4f7b-ab6a-88fb1407c2e7-node-pullsecrets\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.900354 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-audit\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.900445 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.900529 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.900661 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f1969d8-c7f9-4c39-9727-e27a26020d46-trusted-ca\") pod \"console-operator-58897d9998-gjx4m\" (UID: \"8f1969d8-c7f9-4c39-9727-e27a26020d46\") " pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.900746 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.900824 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6krj\" (UniqueName: \"kubernetes.io/projected/843edada-6eb9-46da-ba98-05ccfcd4cb1b-kube-api-access-x6krj\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901037 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cft2r\" (UniqueName: \"kubernetes.io/projected/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-kube-api-access-cft2r\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901135 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/94a81839-4806-4112-8c30-fb70049d8296-available-featuregates\") pod \"openshift-config-operator-7777fb866f-526zm\" (UID: \"94a81839-4806-4112-8c30-fb70049d8296\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901221 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4md6\" (UniqueName: \"kubernetes.io/projected/179be37e-b722-4f7b-ab6a-88fb1407c2e7-kube-api-access-t4md6\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 
06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901274 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/465a2899-647c-4144-8810-46a4a4e49909-config\") pod \"machine-api-operator-5694c8668f-mt5dk\" (UID: \"465a2899-647c-4144-8810-46a4a4e49909\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901312 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-serving-cert\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901360 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/179be37e-b722-4f7b-ab6a-88fb1407c2e7-etcd-client\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901384 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-client-ca\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901447 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f38e9faf-5fe2-49c8-8516-2b5f2766199e-encryption-config\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901478 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/179be37e-b722-4f7b-ab6a-88fb1407c2e7-serving-cert\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901507 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-config\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901527 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sknc\" (UniqueName: \"kubernetes.io/projected/94a81839-4806-4112-8c30-fb70049d8296-kube-api-access-9sknc\") pod \"openshift-config-operator-7777fb866f-526zm\" (UID: \"94a81839-4806-4112-8c30-fb70049d8296\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901557 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-trusted-ca-bundle\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901583 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901602 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ab7371ad-5001-4223-84f1-c7d55ce3c5f3-metrics-tls\") pod \"dns-operator-744455d44c-4qn2t\" (UID: \"ab7371ad-5001-4223-84f1-c7d55ce3c5f3\") " pod="openshift-dns-operator/dns-operator-744455d44c-4qn2t" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901686 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/179be37e-b722-4f7b-ab6a-88fb1407c2e7-encryption-config\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901715 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z58l6\" (UniqueName: \"kubernetes.io/projected/6c62f053-996e-44b6-9a65-3d7f292b6cef-kube-api-access-z58l6\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901763 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f38e9faf-5fe2-49c8-8516-2b5f2766199e-audit-dir\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901787 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-etcd-serving-ca\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901806 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.901831 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s96gj\" (UniqueName: 
\"kubernetes.io/projected/f38e9faf-5fe2-49c8-8516-2b5f2766199e-kube-api-access-s96gj\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.902394 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-rwj5c"] Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.902749 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.903139 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.904156 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.905937 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.906208 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.906726 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.906985 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.907225 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.907434 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.907867 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.908298 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.908377 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.908560 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.908588 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.908700 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.908822 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.908953 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 
06:48:44.908970 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.909234 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pgpzd"] Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.909815 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.910360 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.910875 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.911554 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.911780 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.912665 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz"] Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.913135 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.915076 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.915532 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.915761 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.915879 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.916011 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.916133 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.916498 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.916696 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.916818 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.916933 4941 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.917182 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.917399 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.917619 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.922090 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.923157 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.923395 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.923734 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.924011 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.924480 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.924648 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.924831 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.925011 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.925177 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.925355 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.925645 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6"] Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.926506 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.916946 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.916752 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.924159 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.924226 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.924266 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.916709 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.924107 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.932228 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.933055 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.936552 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.938231 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.964803 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.966503 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg"] Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.966554 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.967598 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.968025 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.973076 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.976556 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd"] Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.976914 4941 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.977190 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.977385 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-526zm"] Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.994451 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Nov 30 06:48:44 crc kubenswrapper[4941]: I1130 06:48:44.995205 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.003612 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004368 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f38e9faf-5fe2-49c8-8516-2b5f2766199e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004416 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004448 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94a81839-4806-4112-8c30-fb70049d8296-serving-cert\") pod \"openshift-config-operator-7777fb866f-526zm\" (UID: \"94a81839-4806-4112-8c30-fb70049d8296\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004464 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004484 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/465a2899-647c-4144-8810-46a4a4e49909-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-mt5dk\" (UID: \"465a2899-647c-4144-8810-46a4a4e49909\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004499 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f38e9faf-5fe2-49c8-8516-2b5f2766199e-audit-policies\") pod 
\"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004513 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f1969d8-c7f9-4c39-9727-e27a26020d46-config\") pod \"console-operator-58897d9998-gjx4m\" (UID: \"8f1969d8-c7f9-4c39-9727-e27a26020d46\") " pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004529 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004556 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzvtl\" (UniqueName: \"kubernetes.io/projected/8f1969d8-c7f9-4c39-9727-e27a26020d46-kube-api-access-bzvtl\") pod \"console-operator-58897d9998-gjx4m\" (UID: \"8f1969d8-c7f9-4c39-9727-e27a26020d46\") " pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004573 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7q5m\" (UniqueName: \"kubernetes.io/projected/ab7371ad-5001-4223-84f1-c7d55ce3c5f3-kube-api-access-d7q5m\") pod \"dns-operator-744455d44c-4qn2t\" (UID: \"ab7371ad-5001-4223-84f1-c7d55ce3c5f3\") " pod="openshift-dns-operator/dns-operator-744455d44c-4qn2t" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004589 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6c62f053-996e-44b6-9a65-3d7f292b6cef-audit-dir\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004609 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/179be37e-b722-4f7b-ab6a-88fb1407c2e7-audit-dir\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004626 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-audit-policies\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004640 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-config\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004657 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f38e9faf-5fe2-49c8-8516-2b5f2766199e-serving-cert\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004672 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f38e9faf-5fe2-49c8-8516-2b5f2766199e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004699 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/179be37e-b722-4f7b-ab6a-88fb1407c2e7-node-pullsecrets\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004719 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-audit\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004738 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004758 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-service-ca-bundle\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004776 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004796 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6krj\" (UniqueName: \"kubernetes.io/projected/843edada-6eb9-46da-ba98-05ccfcd4cb1b-kube-api-access-x6krj\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004813 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f1969d8-c7f9-4c39-9727-e27a26020d46-trusted-ca\") pod 
\"console-operator-58897d9998-gjx4m\" (UID: \"8f1969d8-c7f9-4c39-9727-e27a26020d46\") " pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004831 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004852 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cft2r\" (UniqueName: \"kubernetes.io/projected/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-kube-api-access-cft2r\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004872 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/94a81839-4806-4112-8c30-fb70049d8296-available-featuregates\") pod \"openshift-config-operator-7777fb866f-526zm\" (UID: \"94a81839-4806-4112-8c30-fb70049d8296\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004891 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4md6\" (UniqueName: \"kubernetes.io/projected/179be37e-b722-4f7b-ab6a-88fb1407c2e7-kube-api-access-t4md6\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004922 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/465a2899-647c-4144-8810-46a4a4e49909-config\") pod \"machine-api-operator-5694c8668f-mt5dk\" (UID: \"465a2899-647c-4144-8810-46a4a4e49909\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004954 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-client-ca\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.004979 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-serving-cert\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005007 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/179be37e-b722-4f7b-ab6a-88fb1407c2e7-etcd-client\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc 
kubenswrapper[4941]: I1130 06:48:45.005034 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/179be37e-b722-4f7b-ab6a-88fb1407c2e7-serving-cert\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005066 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f38e9faf-5fe2-49c8-8516-2b5f2766199e-encryption-config\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005098 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-trusted-ca-bundle\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005123 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005156 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ab7371ad-5001-4223-84f1-c7d55ce3c5f3-metrics-tls\") pod \"dns-operator-744455d44c-4qn2t\" (UID: \"ab7371ad-5001-4223-84f1-c7d55ce3c5f3\") " pod="openshift-dns-operator/dns-operator-744455d44c-4qn2t" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005182 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-config\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005214 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sknc\" (UniqueName: \"kubernetes.io/projected/94a81839-4806-4112-8c30-fb70049d8296-kube-api-access-9sknc\") pod \"openshift-config-operator-7777fb866f-526zm\" (UID: \"94a81839-4806-4112-8c30-fb70049d8296\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005244 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/179be37e-b722-4f7b-ab6a-88fb1407c2e7-encryption-config\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005269 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z58l6\" (UniqueName: \"kubernetes.io/projected/6c62f053-996e-44b6-9a65-3d7f292b6cef-kube-api-access-z58l6\") pod 
\"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005304 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f38e9faf-5fe2-49c8-8516-2b5f2766199e-audit-dir\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005342 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-etcd-serving-ca\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005381 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005408 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s96gj\" (UniqueName: \"kubernetes.io/projected/f38e9faf-5fe2-49c8-8516-2b5f2766199e-kube-api-access-s96gj\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005433 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqv8f\" (UniqueName: \"kubernetes.io/projected/465a2899-647c-4144-8810-46a4a4e49909-kube-api-access-nqv8f\") pod \"machine-api-operator-5694c8668f-mt5dk\" (UID: \"465a2899-647c-4144-8810-46a4a4e49909\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005452 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-image-import-ca\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005483 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005499 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/465a2899-647c-4144-8810-46a4a4e49909-images\") pod \"machine-api-operator-5694c8668f-mt5dk\" (UID: \"465a2899-647c-4144-8810-46a4a4e49909\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005517 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-config\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005547 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f38e9faf-5fe2-49c8-8516-2b5f2766199e-etcd-client\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005541 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005843 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005564 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.005998 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.006034 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.006058 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.006106 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/843edada-6eb9-46da-ba98-05ccfcd4cb1b-serving-cert\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.006134 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8f1969d8-c7f9-4c39-9727-e27a26020d46-serving-cert\") pod \"console-operator-58897d9998-gjx4m\" (UID: \"8f1969d8-c7f9-4c39-9727-e27a26020d46\") " pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.007419 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.008270 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.008933 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/94a81839-4806-4112-8c30-fb70049d8296-available-featuregates\") pod \"openshift-config-operator-7777fb866f-526zm\" (UID: \"94a81839-4806-4112-8c30-fb70049d8296\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.016403 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f38e9faf-5fe2-49c8-8516-2b5f2766199e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.017124 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.018182 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/465a2899-647c-4144-8810-46a4a4e49909-config\") pod \"machine-api-operator-5694c8668f-mt5dk\" (UID: \"465a2899-647c-4144-8810-46a4a4e49909\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.024657 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f38e9faf-5fe2-49c8-8516-2b5f2766199e-audit-dir\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.028354 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.029206 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f1969d8-c7f9-4c39-9727-e27a26020d46-serving-cert\") pod \"console-operator-58897d9998-gjx4m\" (UID: \"8f1969d8-c7f9-4c39-9727-e27a26020d46\") " pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.029623 4941 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.029936 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-css84"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.029967 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/179be37e-b722-4f7b-ab6a-88fb1407c2e7-encryption-config\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.031065 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.033985 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jfj9x"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.036225 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-client-ca\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.036726 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94a81839-4806-4112-8c30-fb70049d8296-serving-cert\") pod \"openshift-config-operator-7777fb866f-526zm\" (UID: \"94a81839-4806-4112-8c30-fb70049d8296\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.056753 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.057309 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.082521 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.083660 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f38e9faf-5fe2-49c8-8516-2b5f2766199e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.083721 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/179be37e-b722-4f7b-ab6a-88fb1407c2e7-node-pullsecrets\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.083752 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-audit\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.084638 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-config\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.085153 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/465a2899-647c-4144-8810-46a4a4e49909-images\") pod \"machine-api-operator-5694c8668f-mt5dk\" (UID: \"465a2899-647c-4144-8810-46a4a4e49909\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.085597 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.086144 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.086462 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.087030 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-jfj9x" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.087896 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-zvvg4"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.088068 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-service-ca-bundle\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.088230 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.088556 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-image-import-ca\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.089560 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.089871 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-config\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.090079 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.090766 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.091173 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.091523 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-trusted-ca-bundle\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.091737 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vw95c"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.091920 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f1969d8-c7f9-4c39-9727-e27a26020d46-trusted-ca\") pod \"console-operator-58897d9998-gjx4m\" (UID: \"8f1969d8-c7f9-4c39-9727-e27a26020d46\") " pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.092007 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-config\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.092080 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6c62f053-996e-44b6-9a65-3d7f292b6cef-audit-dir\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.092205 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-46p7c"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.092254 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.093232 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f38e9faf-5fe2-49c8-8516-2b5f2766199e-audit-policies\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.093274 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.093478 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.093544 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.093589 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.093642 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.093802 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f1969d8-c7f9-4c39-9727-e27a26020d46-config\") pod \"console-operator-58897d9998-gjx4m\" (UID: \"8f1969d8-c7f9-4c39-9727-e27a26020d46\") " pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.092258 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.093929 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/843edada-6eb9-46da-ba98-05ccfcd4cb1b-serving-cert\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.094038 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.094106 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/179be37e-b722-4f7b-ab6a-88fb1407c2e7-audit-dir\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.094136 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.094488 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.094782 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.094853 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.095260 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ab7371ad-5001-4223-84f1-c7d55ce3c5f3-metrics-tls\") pod \"dns-operator-744455d44c-4qn2t\" (UID: \"ab7371ad-5001-4223-84f1-c7d55ce3c5f3\") " pod="openshift-dns-operator/dns-operator-744455d44c-4qn2t" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.095784 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.095808 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.096289 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-rw92q"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.096992 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.097047 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-audit-policies\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.097389 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/179be37e-b722-4f7b-ab6a-88fb1407c2e7-etcd-serving-ca\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.097594 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.097613 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rw92q" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.097703 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.097786 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.098915 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.100994 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/465a2899-647c-4144-8810-46a4a4e49909-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-mt5dk\" (UID: \"465a2899-647c-4144-8810-46a4a4e49909\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.101270 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.101904 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/179be37e-b722-4f7b-ab6a-88fb1407c2e7-etcd-client\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.102273 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-serving-cert\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.102983 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.103109 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.103314 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.103746 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-login\") pod 
\"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.103757 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.105644 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f38e9faf-5fe2-49c8-8516-2b5f2766199e-serving-cert\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.106801 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f38e9faf-5fe2-49c8-8516-2b5f2766199e-etcd-client\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107525 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-bound-sa-token\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107559 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52150dbc-4724-4cc3-a326-5caaae27246e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107591 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ccfee3c-1083-4a51-a25b-4678f31d3a51-serving-cert\") pod \"route-controller-manager-6576b87f9c-hzwfd\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107617 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52150dbc-4724-4cc3-a326-5caaae27246e-registry-certificates\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107636 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpq8s\" (UniqueName: \"kubernetes.io/projected/abdfc490-9871-4dd3-84b3-5f446f68f102-kube-api-access-gpq8s\") pod \"cluster-samples-operator-665b6dd947-hzh6t\" (UID: \"abdfc490-9871-4dd3-84b3-5f446f68f102\") " 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107671 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-oauth-serving-cert\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107709 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0d47c312-eb2e-4f83-8d9d-bd30bfd989dd-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jxshm\" (UID: \"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107731 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24xhs\" (UniqueName: \"kubernetes.io/projected/5ccfee3c-1083-4a51-a25b-4678f31d3a51-kube-api-access-24xhs\") pod \"route-controller-manager-6576b87f9c-hzwfd\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107760 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7vtg\" (UniqueName: \"kubernetes.io/projected/df9aa967-eec9-4ce5-9c64-edff3aedca4a-kube-api-access-c7vtg\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107777 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0d47c312-eb2e-4f83-8d9d-bd30bfd989dd-trusted-ca\") pod \"ingress-operator-5b745b69d9-jxshm\" (UID: \"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107816 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-config\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107863 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107885 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ccfee3c-1083-4a51-a25b-4678f31d3a51-config\") pod \"route-controller-manager-6576b87f9c-hzwfd\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" 
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107917 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4425t\" (UniqueName: \"kubernetes.io/projected/0d47c312-eb2e-4f83-8d9d-bd30bfd989dd-kube-api-access-4425t\") pod \"ingress-operator-5b745b69d9-jxshm\" (UID: \"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107951 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-registry-tls\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.107977 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8ngh\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-kube-api-access-x8ngh\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.108002 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ccfee3c-1083-4a51-a25b-4678f31d3a51-client-ca\") pod \"route-controller-manager-6576b87f9c-hzwfd\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.108082 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0d47c312-eb2e-4f83-8d9d-bd30bfd989dd-metrics-tls\") pod \"ingress-operator-5b745b69d9-jxshm\" (UID: \"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.108151 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52150dbc-4724-4cc3-a326-5caaae27246e-trusted-ca\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.108250 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5c335028-5b89-477e-9bf1-e76f8f249d1a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9bcts\" (UID: \"5c335028-5b89-477e-9bf1-e76f8f249d1a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.108345 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c335028-5b89-477e-9bf1-e76f8f249d1a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9bcts\" (UID: \"5c335028-5b89-477e-9bf1-e76f8f249d1a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts" Nov 30 06:48:45 crc 
kubenswrapper[4941]: I1130 06:48:45.108417 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52150dbc-4724-4cc3-a326-5caaae27246e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.108434 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:45.608420365 +0000 UTC m=+146.376592194 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.108460 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-service-ca\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.108504 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-oauth-config\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.108537 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c335028-5b89-477e-9bf1-e76f8f249d1a-config\") pod \"kube-apiserver-operator-766d6c64bb-9bcts\" (UID: \"5c335028-5b89-477e-9bf1-e76f8f249d1a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.108606 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-serving-cert\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.108640 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-trusted-ca-bundle\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.108704 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/abdfc490-9871-4dd3-84b3-5f446f68f102-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-hzh6t\" (UID: \"abdfc490-9871-4dd3-84b3-5f446f68f102\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.108781 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l62tx\" (UniqueName: \"kubernetes.io/projected/542c0442-1fb2-4e3e-ba06-a14526cf98ce-kube-api-access-l62tx\") pod \"downloads-7954f5f757-vkq5m\" (UID: \"542c0442-1fb2-4e3e-ba06-a14526cf98ce\") " pod="openshift-console/downloads-7954f5f757-vkq5m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.109694 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f38e9faf-5fe2-49c8-8516-2b5f2766199e-encryption-config\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.109866 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/179be37e-b722-4f7b-ab6a-88fb1407c2e7-serving-cert\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.114043 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.114260 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-pkrwj"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.114298 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vhvpc"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.115128 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-mt5dk"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.116122 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.117533 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.118859 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krsrr"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.119802 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.121075 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.121968 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xrszt"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.123307 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pgpzd"] Nov 30 
06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.123952 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-frzl6"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.124922 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4qn2t"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.126759 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-gjx4m"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.127059 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-vkq5m"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.131450 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.133074 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.134150 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.145620 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-hcx8m"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.147535 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-57xxf"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.147697 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-hcx8m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.148408 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-57xxf" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.148753 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.149970 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-zvvg4"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.151468 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.152409 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vw95c"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.153580 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.153726 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.155304 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-46p7c"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.156411 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.157660 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.158785 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-rw92q"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.160002 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.161663 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jfj9x"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.162607 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.163670 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-57xxf"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.165074 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.166262 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.167558 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh"] Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.168807 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx"] Nov 30 06:48:45 
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.169940 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb"]
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.171085 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-st8vq"]
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.172097 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-st8vq"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.172225 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-st8vq"]
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.172744 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.192652 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209306 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.209456 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:45.709426701 +0000 UTC m=+146.477598321 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209642 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czwz9\" (UniqueName: \"kubernetes.io/projected/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-kube-api-access-czwz9\") pod \"machine-config-server-hcx8m\" (UID: \"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac\") " pod="openshift-machine-config-operator/machine-config-server-hcx8m"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209691 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-oauth-serving-cert\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209714 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29524d66-9853-4dde-9ba5-9e124a055dd2-config\") pod \"service-ca-operator-777779d784-tvt4t\" (UID: \"29524d66-9853-4dde-9ba5-9e124a055dd2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209740 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgcf5\" (UniqueName: \"kubernetes.io/projected/43fead9f-0d7a-4d82-8822-b4e83849d4ad-kube-api-access-tgcf5\") pod \"marketplace-operator-79b997595-vw95c\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-vw95c"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209771 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0d47c312-eb2e-4f83-8d9d-bd30bfd989dd-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jxshm\" (UID: \"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209793 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24xhs\" (UniqueName: \"kubernetes.io/projected/5ccfee3c-1083-4a51-a25b-4678f31d3a51-kube-api-access-24xhs\") pod \"route-controller-manager-6576b87f9c-hzwfd\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209814 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/9118a944-3b9c-4f67-917a-899581233d1e-machine-approver-tls\") pod \"machine-approver-56656f9798-cpxtg\" (UID: \"9118a944-3b9c-4f67-917a-899581233d1e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209838 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c4234d88-5b46-45ed-8a17-ac586277459f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-qhxrz\" (UID: \"c4234d88-5b46-45ed-8a17-ac586277459f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209901 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bf402eed-bc58-4a99-ab0c-bd75b072ebf9-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rxb6\" (UID: \"bf402eed-bc58-4a99-ab0c-bd75b072ebf9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209919 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29524d66-9853-4dde-9ba5-9e124a055dd2-serving-cert\") pod \"service-ca-operator-777779d784-tvt4t\" (UID: \"29524d66-9853-4dde-9ba5-9e124a055dd2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209937 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf402eed-bc58-4a99-ab0c-bd75b072ebf9-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rxb6\" (UID: \"bf402eed-bc58-4a99-ab0c-bd75b072ebf9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209973 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73-config\") pod \"kube-controller-manager-operator-78b949d7b-2hstq\" (UID: \"7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.209991 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0cc75551-8b40-4ae7-a917-7aaa202313c4-profile-collector-cert\") pod \"catalog-operator-68c6474976-g9klz\" (UID: \"0cc75551-8b40-4ae7-a917-7aaa202313c4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.210011 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-config\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.210034 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vw95c\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-vw95c"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.210059 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/cc097706-3829-4b7b-9047-97a52667c825-signing-cabundle\") pod \"service-ca-9c57cc56f-zvvg4\" (UID: \"cc097706-3829-4b7b-9047-97a52667c825\") " pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.210098 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m95rt\" (UniqueName: \"kubernetes.io/projected/0cffa0e5-cf3c-450b-b289-d52fe242ad11-kube-api-access-m95rt\") pod \"openshift-apiserver-operator-796bbdcf4f-mvbfz\" (UID: \"0cffa0e5-cf3c-450b-b289-d52fe242ad11\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.210364 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-registration-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.210659 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4425t\" (UniqueName: \"kubernetes.io/projected/0d47c312-eb2e-4f83-8d9d-bd30bfd989dd-kube-api-access-4425t\") pod \"ingress-operator-5b745b69d9-jxshm\" (UID: \"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.210848 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt2ml\" (UniqueName: \"kubernetes.io/projected/b426c32f-f8fd-4019-af68-7d5febadde67-kube-api-access-jt2ml\") pod \"package-server-manager-789f6589d5-dgbsb\" (UID: \"b426c32f-f8fd-4019-af68-7d5febadde67\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.210978 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fpwk\" (UniqueName: \"kubernetes.io/projected/fb91f88b-4fc5-464d-ac96-88b0828e67c0-kube-api-access-8fpwk\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.211064 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8ngh\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-kube-api-access-x8ngh\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.211114 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2hstq\" (UID: \"7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.211262 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-oauth-serving-cert\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.211300 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-config\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.211279 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cffa0e5-cf3c-450b-b289-d52fe242ad11-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mvbfz\" (UID: \"0cffa0e5-cf3c-450b-b289-d52fe242ad11\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.211452 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0d47c312-eb2e-4f83-8d9d-bd30bfd989dd-metrics-tls\") pod \"ingress-operator-5b745b69d9-jxshm\" (UID: \"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.211519 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c4666640-fb87-46f0-8731-f9946f1f7470-etcd-client\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.211768 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/773d34d4-1723-42c4-8b83-43f629b630c2-config-volume\") pod \"collect-profiles-29408085-x6l8j\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.211844 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fb91f88b-4fc5-464d-ac96-88b0828e67c0-metrics-certs\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.211899 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52150dbc-4724-4cc3-a326-5caaae27246e-trusted-ca\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
\"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.211983 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pclv\" (UniqueName: \"kubernetes.io/projected/c4666640-fb87-46f0-8731-f9946f1f7470-kube-api-access-7pclv\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.212033 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2hstq\" (UID: \"7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.212076 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d7c5acd-fc0a-4c18-8b24-59227b10369f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-4m85s\" (UID: \"3d7c5acd-fc0a-4c18-8b24-59227b10369f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.212146 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2faf43c0-49a3-41f7-a278-180d44c03689-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4n8bt\" (UID: \"2faf43c0-49a3-41f7-a278-180d44c03689\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.212220 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/550aea27-f732-47cb-9c04-93edb6216ae0-tmpfs\") pod \"packageserver-d55dfcdfc-sk4xx\" (UID: \"550aea27-f732-47cb-9c04-93edb6216ae0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.212287 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/66ac15af-4bac-4898-a79d-3481df21bd0e-srv-cert\") pod \"olm-operator-6b444d44fb-28g6q\" (UID: \"66ac15af-4bac-4898-a79d-3481df21bd0e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.212352 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/550aea27-f732-47cb-9c04-93edb6216ae0-webhook-cert\") pod \"packageserver-d55dfcdfc-sk4xx\" (UID: \"550aea27-f732-47cb-9c04-93edb6216ae0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.212524 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/52150dbc-4724-4cc3-a326-5caaae27246e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.212575 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4666640-fb87-46f0-8731-f9946f1f7470-serving-cert\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.212618 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rckqf\" (UniqueName: \"kubernetes.io/projected/ef794854-78b3-48a6-8fa0-3d2bd613ecd2-kube-api-access-rckqf\") pod \"multus-admission-controller-857f4d67dd-jfj9x\" (UID: \"ef794854-78b3-48a6-8fa0-3d2bd613ecd2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jfj9x" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.212667 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e6486e25-f042-42ac-a9e4-8a399bf9b414-metrics-tls\") pod \"dns-default-st8vq\" (UID: \"e6486e25-f042-42ac-a9e4-8a399bf9b414\") " pod="openshift-dns/dns-default-st8vq" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.212819 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f90130b9-9551-4823-9397-48b583729552-auth-proxy-config\") pod \"machine-config-operator-74547568cd-4rj4n\" (UID: \"f90130b9-9551-4823-9397-48b583729552\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.212931 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9118a944-3b9c-4f67-917a-899581233d1e-auth-proxy-config\") pod \"machine-approver-56656f9798-cpxtg\" (UID: \"9118a944-3b9c-4f67-917a-899581233d1e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.213053 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b426c32f-f8fd-4019-af68-7d5febadde67-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dgbsb\" (UID: \"b426c32f-f8fd-4019-af68-7d5febadde67\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.213230 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-serving-cert\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.213367 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/abdfc490-9871-4dd3-84b3-5f446f68f102-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-hzh6t\" (UID: \"abdfc490-9871-4dd3-84b3-5f446f68f102\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.213399 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vw95c\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.213429 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l62tx\" (UniqueName: \"kubernetes.io/projected/542c0442-1fb2-4e3e-ba06-a14526cf98ce-kube-api-access-l62tx\") pod \"downloads-7954f5f757-vkq5m\" (UID: \"542c0442-1fb2-4e3e-ba06-a14526cf98ce\") " pod="openshift-console/downloads-7954f5f757-vkq5m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.213443 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52150dbc-4724-4cc3-a326-5caaae27246e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.213456 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-socket-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.213532 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fb91f88b-4fc5-464d-ac96-88b0828e67c0-service-ca-bundle\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.213572 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2faf43c0-49a3-41f7-a278-180d44c03689-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4n8bt\" (UID: \"2faf43c0-49a3-41f7-a278-180d44c03689\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.213632 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9118a944-3b9c-4f67-917a-899581233d1e-config\") pod \"machine-approver-56656f9798-cpxtg\" (UID: \"9118a944-3b9c-4f67-917a-899581233d1e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.213375 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.213716 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/66ac15af-4bac-4898-a79d-3481df21bd0e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-28g6q\" (UID: \"66ac15af-4bac-4898-a79d-3481df21bd0e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214061 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52150dbc-4724-4cc3-a326-5caaae27246e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214141 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc4gd\" (UniqueName: \"kubernetes.io/projected/c4234d88-5b46-45ed-8a17-ac586277459f-kube-api-access-hc4gd\") pod \"machine-config-controller-84d6567774-qhxrz\" (UID: \"c4234d88-5b46-45ed-8a17-ac586277459f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214217 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ccfee3c-1083-4a51-a25b-4678f31d3a51-serving-cert\") pod \"route-controller-manager-6576b87f9c-hzwfd\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214310 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/550aea27-f732-47cb-9c04-93edb6216ae0-apiservice-cert\") pod \"packageserver-d55dfcdfc-sk4xx\" (UID: \"550aea27-f732-47cb-9c04-93edb6216ae0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214419 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c82qh\" (UniqueName: \"kubernetes.io/projected/63606771-b004-4903-a2a1-d5032a0fa94b-kube-api-access-c82qh\") pod \"control-plane-machine-set-operator-78cbb6b69f-ppcvh\" (UID: \"63606771-b004-4903-a2a1-d5032a0fa94b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214519 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52150dbc-4724-4cc3-a326-5caaae27246e-registry-certificates\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214596 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npdjz\" (UniqueName: \"kubernetes.io/projected/29524d66-9853-4dde-9ba5-9e124a055dd2-kube-api-access-npdjz\") pod \"service-ca-operator-777779d784-tvt4t\" (UID: \"29524d66-9853-4dde-9ba5-9e124a055dd2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214675 
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214675 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f90130b9-9551-4823-9397-48b583729552-images\") pod \"machine-config-operator-74547568cd-4rj4n\" (UID: \"f90130b9-9551-4823-9397-48b583729552\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214786 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/cc097706-3829-4b7b-9047-97a52667c825-signing-key\") pod \"service-ca-9c57cc56f-zvvg4\" (UID: \"cc097706-3829-4b7b-9047-97a52667c825\") " pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214894 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7b8c\" (UniqueName: \"kubernetes.io/projected/f90130b9-9551-4823-9397-48b583729552-kube-api-access-w7b8c\") pod \"machine-config-operator-74547568cd-4rj4n\" (UID: \"f90130b9-9551-4823-9397-48b583729552\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214992 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c4666640-fb87-46f0-8731-f9946f1f7470-etcd-ca\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.215105 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7vtg\" (UniqueName: \"kubernetes.io/projected/df9aa967-eec9-4ce5-9c64-edff3aedca4a-kube-api-access-c7vtg\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.215213 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0d47c312-eb2e-4f83-8d9d-bd30bfd989dd-trusted-ca\") pod \"ingress-operator-5b745b69d9-jxshm\" (UID: \"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.215363 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11220b25-0e89-43b7-95ef-4385c047753e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmnw2\" (UID: \"11220b25-0e89-43b7-95ef-4385c047753e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.215541 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wr9x\" (UniqueName: \"kubernetes.io/projected/3d7c5acd-fc0a-4c18-8b24-59227b10369f-kube-api-access-2wr9x\") pod \"kube-storage-version-migrator-operator-b67b599dd-4m85s\" (UID: \"3d7c5acd-fc0a-4c18-8b24-59227b10369f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s"
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/fb91f88b-4fc5-464d-ac96-88b0828e67c0-stats-auth\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.215774 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2faf43c0-49a3-41f7-a278-180d44c03689-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4n8bt\" (UID: \"2faf43c0-49a3-41f7-a278-180d44c03689\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.215912 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.215993 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ccfee3c-1083-4a51-a25b-4678f31d3a51-config\") pod \"route-controller-manager-6576b87f9c-hzwfd\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216072 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc84p\" (UniqueName: \"kubernetes.io/projected/773d34d4-1723-42c4-8b83-43f629b630c2-kube-api-access-qc84p\") pod \"collect-profiles-29408085-x6l8j\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216148 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d7c5acd-fc0a-4c18-8b24-59227b10369f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-4m85s\" (UID: \"3d7c5acd-fc0a-4c18-8b24-59227b10369f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216229 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/63606771-b004-4903-a2a1-d5032a0fa94b-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ppcvh\" (UID: \"63606771-b004-4903-a2a1-d5032a0fa94b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216310 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0cc75551-8b40-4ae7-a917-7aaa202313c4-srv-cert\") pod \"catalog-operator-68c6474976-g9klz\" (UID: \"0cc75551-8b40-4ae7-a917-7aaa202313c4\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216418 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0cffa0e5-cf3c-450b-b289-d52fe242ad11-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mvbfz\" (UID: \"0cffa0e5-cf3c-450b-b289-d52fe242ad11\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216510 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-registry-tls\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.216578 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:45.716560544 +0000 UTC m=+146.484732153 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216661 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ccfee3c-1083-4a51-a25b-4678f31d3a51-client-ca\") pod \"route-controller-manager-6576b87f9c-hzwfd\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216721 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtfgm\" (UniqueName: \"kubernetes.io/projected/66ac15af-4bac-4898-a79d-3481df21bd0e-kube-api-access-gtfgm\") pod \"olm-operator-6b444d44fb-28g6q\" (UID: \"66ac15af-4bac-4898-a79d-3481df21bd0e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216747 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-certs\") pod \"machine-config-server-hcx8m\" (UID: \"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac\") " pod="openshift-machine-config-operator/machine-config-server-hcx8m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216790 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m78vr\" (UniqueName: \"kubernetes.io/projected/59417585-2120-43f8-a16f-dc80dc6d1fd0-kube-api-access-m78vr\") pod \"migrator-59844c95c7-rw92q\" (UID: \"59417585-2120-43f8-a16f-dc80dc6d1fd0\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rw92q" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216814 
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216814 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94pxb\" (UniqueName: \"kubernetes.io/projected/6b3b1c30-7677-4bff-85a6-80dd7a3548b0-kube-api-access-94pxb\") pod \"ingress-canary-57xxf\" (UID: \"6b3b1c30-7677-4bff-85a6-80dd7a3548b0\") " pod="openshift-ingress-canary/ingress-canary-57xxf"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216842 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtxz6\" (UniqueName: \"kubernetes.io/projected/9118a944-3b9c-4f67-917a-899581233d1e-kube-api-access-dtxz6\") pod \"machine-approver-56656f9798-cpxtg\" (UID: \"9118a944-3b9c-4f67-917a-899581233d1e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216884 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11220b25-0e89-43b7-95ef-4385c047753e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmnw2\" (UID: \"11220b25-0e89-43b7-95ef-4385c047753e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216914 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5c335028-5b89-477e-9bf1-e76f8f249d1a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9bcts\" (UID: \"5c335028-5b89-477e-9bf1-e76f8f249d1a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.216973 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c4234d88-5b46-45ed-8a17-ac586277459f-proxy-tls\") pod \"machine-config-controller-84d6567774-qhxrz\" (UID: \"c4234d88-5b46-45ed-8a17-ac586277459f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.217026 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/fb91f88b-4fc5-464d-ac96-88b0828e67c0-default-certificate\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.217058 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bf402eed-bc58-4a99-ab0c-bd75b072ebf9-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rxb6\" (UID: \"bf402eed-bc58-4a99-ab0c-bd75b072ebf9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.217128 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/773d34d4-1723-42c4-8b83-43f629b630c2-secret-volume\") pod \"collect-profiles-29408085-x6l8j\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.217160 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-node-bootstrap-token\") pod \"machine-config-server-hcx8m\" (UID: \"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac\") " pod="openshift-machine-config-operator/machine-config-server-hcx8m"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.217203 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52150dbc-4724-4cc3-a326-5caaae27246e-registry-certificates\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.217360 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-plugins-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.217454 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c335028-5b89-477e-9bf1-e76f8f249d1a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9bcts\" (UID: \"5c335028-5b89-477e-9bf1-e76f8f249d1a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.214917 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52150dbc-4724-4cc3-a326-5caaae27246e-trusted-ca\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.217891 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ccfee3c-1083-4a51-a25b-4678f31d3a51-config\") pod \"route-controller-manager-6576b87f9c-hzwfd\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.218058 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0d47c312-eb2e-4f83-8d9d-bd30bfd989dd-trusted-ca\") pod \"ingress-operator-5b745b69d9-jxshm\" (UID: \"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.218139 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ccfee3c-1083-4a51-a25b-4678f31d3a51-client-ca\") pod \"route-controller-manager-6576b87f9c-hzwfd\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd"
\"ingress-operator-5b745b69d9-jxshm\" (UID: \"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.217601 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9zxt\" (UniqueName: \"kubernetes.io/projected/550aea27-f732-47cb-9c04-93edb6216ae0-kube-api-access-w9zxt\") pod \"packageserver-d55dfcdfc-sk4xx\" (UID: \"550aea27-f732-47cb-9c04-93edb6216ae0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.219149 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-mountpoint-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.219243 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-service-ca\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.219288 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps4vz\" (UniqueName: \"kubernetes.io/projected/0cc75551-8b40-4ae7-a917-7aaa202313c4-kube-api-access-ps4vz\") pod \"catalog-operator-68c6474976-g9klz\" (UID: \"0cc75551-8b40-4ae7-a917-7aaa202313c4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.219350 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6b3b1c30-7677-4bff-85a6-80dd7a3548b0-cert\") pod \"ingress-canary-57xxf\" (UID: \"6b3b1c30-7677-4bff-85a6-80dd7a3548b0\") " pod="openshift-ingress-canary/ingress-canary-57xxf" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.219395 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmbq4\" (UniqueName: \"kubernetes.io/projected/2faf43c0-49a3-41f7-a278-180d44c03689-kube-api-access-mmbq4\") pod \"cluster-image-registry-operator-dc59b4c8b-4n8bt\" (UID: \"2faf43c0-49a3-41f7-a278-180d44c03689\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.220282 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-service-ca\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.221615 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-oauth-config\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.221714 4941 
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.221714 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c335028-5b89-477e-9bf1-e76f8f249d1a-config\") pod \"kube-apiserver-operator-766d6c64bb-9bcts\" (UID: \"5c335028-5b89-477e-9bf1-e76f8f249d1a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.221796 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/abdfc490-9871-4dd3-84b3-5f446f68f102-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-hzh6t\" (UID: \"abdfc490-9871-4dd3-84b3-5f446f68f102\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.221912 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ef794854-78b3-48a6-8fa0-3d2bd613ecd2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jfj9x\" (UID: \"ef794854-78b3-48a6-8fa0-3d2bd613ecd2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jfj9x"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.221955 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6486e25-f042-42ac-a9e4-8a399bf9b414-config-volume\") pod \"dns-default-st8vq\" (UID: \"e6486e25-f042-42ac-a9e4-8a399bf9b414\") " pod="openshift-dns/dns-default-st8vq"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.221994 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-trusted-ca-bundle\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.222037 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqktd\" (UniqueName: \"kubernetes.io/projected/11220b25-0e89-43b7-95ef-4385c047753e-kube-api-access-mqktd\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmnw2\" (UID: \"11220b25-0e89-43b7-95ef-4385c047753e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.222120 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grxp7\" (UniqueName: \"kubernetes.io/projected/de02b605-6b0f-476b-9df3-fd41e3a320a2-kube-api-access-grxp7\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.222234 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-bound-sa-token\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
\"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-csi-data-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.222724 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-registry-tls\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.222771 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ccfee3c-1083-4a51-a25b-4678f31d3a51-serving-cert\") pod \"route-controller-manager-6576b87f9c-hzwfd\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.222946 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-serving-cert\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.222952 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c4666640-fb87-46f0-8731-f9946f1f7470-etcd-service-ca\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.223061 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f90130b9-9551-4823-9397-48b583729552-proxy-tls\") pod \"machine-config-operator-74547568cd-4rj4n\" (UID: \"f90130b9-9551-4823-9397-48b583729552\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.223103 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv78p\" (UniqueName: \"kubernetes.io/projected/e6486e25-f042-42ac-a9e4-8a399bf9b414-kube-api-access-pv78p\") pod \"dns-default-st8vq\" (UID: \"e6486e25-f042-42ac-a9e4-8a399bf9b414\") " pod="openshift-dns/dns-default-st8vq" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.223136 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6gzv\" (UniqueName: \"kubernetes.io/projected/cc097706-3829-4b7b-9047-97a52667c825-kube-api-access-k6gzv\") pod \"service-ca-9c57cc56f-zvvg4\" (UID: \"cc097706-3829-4b7b-9047-97a52667c825\") " pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.223220 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpq8s\" (UniqueName: \"kubernetes.io/projected/abdfc490-9871-4dd3-84b3-5f446f68f102-kube-api-access-gpq8s\") pod \"cluster-samples-operator-665b6dd947-hzh6t\" (UID: \"abdfc490-9871-4dd3-84b3-5f446f68f102\") " 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.223344 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c335028-5b89-477e-9bf1-e76f8f249d1a-config\") pod \"kube-apiserver-operator-766d6c64bb-9bcts\" (UID: \"5c335028-5b89-477e-9bf1-e76f8f249d1a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.223714 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-trusted-ca-bundle\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.235123 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52150dbc-4724-4cc3-a326-5caaae27246e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.235198 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c335028-5b89-477e-9bf1-e76f8f249d1a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9bcts\" (UID: \"5c335028-5b89-477e-9bf1-e76f8f249d1a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.235247 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-oauth-config\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.236690 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.252390 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.272394 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.292622 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.313644 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.324463 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.324663 4941 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:45.824627581 +0000 UTC m=+146.592799190 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.324834 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94pxb\" (UniqueName: \"kubernetes.io/projected/6b3b1c30-7677-4bff-85a6-80dd7a3548b0-kube-api-access-94pxb\") pod \"ingress-canary-57xxf\" (UID: \"6b3b1c30-7677-4bff-85a6-80dd7a3548b0\") " pod="openshift-ingress-canary/ingress-canary-57xxf" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.324895 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtxz6\" (UniqueName: \"kubernetes.io/projected/9118a944-3b9c-4f67-917a-899581233d1e-kube-api-access-dtxz6\") pod \"machine-approver-56656f9798-cpxtg\" (UID: \"9118a944-3b9c-4f67-917a-899581233d1e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.324940 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11220b25-0e89-43b7-95ef-4385c047753e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmnw2\" (UID: \"11220b25-0e89-43b7-95ef-4385c047753e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.324989 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m78vr\" (UniqueName: \"kubernetes.io/projected/59417585-2120-43f8-a16f-dc80dc6d1fd0-kube-api-access-m78vr\") pod \"migrator-59844c95c7-rw92q\" (UID: \"59417585-2120-43f8-a16f-dc80dc6d1fd0\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rw92q" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.325043 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c4234d88-5b46-45ed-8a17-ac586277459f-proxy-tls\") pod \"machine-config-controller-84d6567774-qhxrz\" (UID: \"c4234d88-5b46-45ed-8a17-ac586277459f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.325090 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/fb91f88b-4fc5-464d-ac96-88b0828e67c0-default-certificate\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.325187 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bf402eed-bc58-4a99-ab0c-bd75b072ebf9-kube-api-access\") pod 
\"openshift-kube-scheduler-operator-5fdd9b5758-2rxb6\" (UID: \"bf402eed-bc58-4a99-ab0c-bd75b072ebf9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326144 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/773d34d4-1723-42c4-8b83-43f629b630c2-secret-volume\") pod \"collect-profiles-29408085-x6l8j\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326203 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-node-bootstrap-token\") pod \"machine-config-server-hcx8m\" (UID: \"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac\") " pod="openshift-machine-config-operator/machine-config-server-hcx8m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326250 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9zxt\" (UniqueName: \"kubernetes.io/projected/550aea27-f732-47cb-9c04-93edb6216ae0-kube-api-access-w9zxt\") pod \"packageserver-d55dfcdfc-sk4xx\" (UID: \"550aea27-f732-47cb-9c04-93edb6216ae0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326288 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-mountpoint-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326360 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-plugins-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326442 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps4vz\" (UniqueName: \"kubernetes.io/projected/0cc75551-8b40-4ae7-a917-7aaa202313c4-kube-api-access-ps4vz\") pod \"catalog-operator-68c6474976-g9klz\" (UID: \"0cc75551-8b40-4ae7-a917-7aaa202313c4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326492 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmbq4\" (UniqueName: \"kubernetes.io/projected/2faf43c0-49a3-41f7-a278-180d44c03689-kube-api-access-mmbq4\") pod \"cluster-image-registry-operator-dc59b4c8b-4n8bt\" (UID: \"2faf43c0-49a3-41f7-a278-180d44c03689\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326496 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11220b25-0e89-43b7-95ef-4385c047753e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmnw2\" (UID: \"11220b25-0e89-43b7-95ef-4385c047753e\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326540 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ef794854-78b3-48a6-8fa0-3d2bd613ecd2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jfj9x\" (UID: \"ef794854-78b3-48a6-8fa0-3d2bd613ecd2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jfj9x" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326563 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-mountpoint-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326630 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6486e25-f042-42ac-a9e4-8a399bf9b414-config-volume\") pod \"dns-default-st8vq\" (UID: \"e6486e25-f042-42ac-a9e4-8a399bf9b414\") " pod="openshift-dns/dns-default-st8vq" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326778 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6b3b1c30-7677-4bff-85a6-80dd7a3548b0-cert\") pod \"ingress-canary-57xxf\" (UID: \"6b3b1c30-7677-4bff-85a6-80dd7a3548b0\") " pod="openshift-ingress-canary/ingress-canary-57xxf" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326856 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqktd\" (UniqueName: \"kubernetes.io/projected/11220b25-0e89-43b7-95ef-4385c047753e-kube-api-access-mqktd\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmnw2\" (UID: \"11220b25-0e89-43b7-95ef-4385c047753e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.326953 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grxp7\" (UniqueName: \"kubernetes.io/projected/de02b605-6b0f-476b-9df3-fd41e3a320a2-kube-api-access-grxp7\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327034 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-csi-data-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327105 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c4666640-fb87-46f0-8731-f9946f1f7470-etcd-service-ca\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327152 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/f90130b9-9551-4823-9397-48b583729552-proxy-tls\") pod \"machine-config-operator-74547568cd-4rj4n\" (UID: \"f90130b9-9551-4823-9397-48b583729552\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327183 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-csi-data-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327202 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6gzv\" (UniqueName: \"kubernetes.io/projected/cc097706-3829-4b7b-9047-97a52667c825-kube-api-access-k6gzv\") pod \"service-ca-9c57cc56f-zvvg4\" (UID: \"cc097706-3829-4b7b-9047-97a52667c825\") " pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327296 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv78p\" (UniqueName: \"kubernetes.io/projected/e6486e25-f042-42ac-a9e4-8a399bf9b414-kube-api-access-pv78p\") pod \"dns-default-st8vq\" (UID: \"e6486e25-f042-42ac-a9e4-8a399bf9b414\") " pod="openshift-dns/dns-default-st8vq" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327375 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czwz9\" (UniqueName: \"kubernetes.io/projected/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-kube-api-access-czwz9\") pod \"machine-config-server-hcx8m\" (UID: \"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac\") " pod="openshift-machine-config-operator/machine-config-server-hcx8m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327407 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29524d66-9853-4dde-9ba5-9e124a055dd2-config\") pod \"service-ca-operator-777779d784-tvt4t\" (UID: \"29524d66-9853-4dde-9ba5-9e124a055dd2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327434 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgcf5\" (UniqueName: \"kubernetes.io/projected/43fead9f-0d7a-4d82-8822-b4e83849d4ad-kube-api-access-tgcf5\") pod \"marketplace-operator-79b997595-vw95c\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327493 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/9118a944-3b9c-4f67-917a-899581233d1e-machine-approver-tls\") pod \"machine-approver-56656f9798-cpxtg\" (UID: \"9118a944-3b9c-4f67-917a-899581233d1e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327524 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c4234d88-5b46-45ed-8a17-ac586277459f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-qhxrz\" (UID: \"c4234d88-5b46-45ed-8a17-ac586277459f\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327569 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bf402eed-bc58-4a99-ab0c-bd75b072ebf9-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rxb6\" (UID: \"bf402eed-bc58-4a99-ab0c-bd75b072ebf9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327597 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29524d66-9853-4dde-9ba5-9e124a055dd2-serving-cert\") pod \"service-ca-operator-777779d784-tvt4t\" (UID: \"29524d66-9853-4dde-9ba5-9e124a055dd2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327621 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf402eed-bc58-4a99-ab0c-bd75b072ebf9-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rxb6\" (UID: \"bf402eed-bc58-4a99-ab0c-bd75b072ebf9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327650 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73-config\") pod \"kube-controller-manager-operator-78b949d7b-2hstq\" (UID: \"7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327676 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0cc75551-8b40-4ae7-a917-7aaa202313c4-profile-collector-cert\") pod \"catalog-operator-68c6474976-g9klz\" (UID: \"0cc75551-8b40-4ae7-a917-7aaa202313c4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327707 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vw95c\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327735 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/cc097706-3829-4b7b-9047-97a52667c825-signing-cabundle\") pod \"service-ca-9c57cc56f-zvvg4\" (UID: \"cc097706-3829-4b7b-9047-97a52667c825\") " pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327767 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m95rt\" (UniqueName: \"kubernetes.io/projected/0cffa0e5-cf3c-450b-b289-d52fe242ad11-kube-api-access-m95rt\") pod \"openshift-apiserver-operator-796bbdcf4f-mvbfz\" (UID: \"0cffa0e5-cf3c-450b-b289-d52fe242ad11\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327503 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-plugins-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327812 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-registration-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327869 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-registration-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327958 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt2ml\" (UniqueName: \"kubernetes.io/projected/b426c32f-f8fd-4019-af68-7d5febadde67-kube-api-access-jt2ml\") pod \"package-server-manager-789f6589d5-dgbsb\" (UID: \"b426c32f-f8fd-4019-af68-7d5febadde67\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.327998 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c4666640-fb87-46f0-8731-f9946f1f7470-etcd-service-ca\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328017 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fpwk\" (UniqueName: \"kubernetes.io/projected/fb91f88b-4fc5-464d-ac96-88b0828e67c0-kube-api-access-8fpwk\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328072 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2hstq\" (UID: \"7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328116 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cffa0e5-cf3c-450b-b289-d52fe242ad11-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mvbfz\" (UID: \"0cffa0e5-cf3c-450b-b289-d52fe242ad11\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328162 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etcd-client\" (UniqueName: \"kubernetes.io/secret/c4666640-fb87-46f0-8731-f9946f1f7470-etcd-client\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328233 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/773d34d4-1723-42c4-8b83-43f629b630c2-config-volume\") pod \"collect-profiles-29408085-x6l8j\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328276 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fb91f88b-4fc5-464d-ac96-88b0828e67c0-metrics-certs\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328293 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c4234d88-5b46-45ed-8a17-ac586277459f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-qhxrz\" (UID: \"c4234d88-5b46-45ed-8a17-ac586277459f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328314 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4666640-fb87-46f0-8731-f9946f1f7470-config\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328379 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pclv\" (UniqueName: \"kubernetes.io/projected/c4666640-fb87-46f0-8731-f9946f1f7470-kube-api-access-7pclv\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328418 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2hstq\" (UID: \"7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328460 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d7c5acd-fc0a-4c18-8b24-59227b10369f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-4m85s\" (UID: \"3d7c5acd-fc0a-4c18-8b24-59227b10369f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328509 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2faf43c0-49a3-41f7-a278-180d44c03689-image-registry-operator-tls\") pod 
\"cluster-image-registry-operator-dc59b4c8b-4n8bt\" (UID: \"2faf43c0-49a3-41f7-a278-180d44c03689\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328563 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/550aea27-f732-47cb-9c04-93edb6216ae0-tmpfs\") pod \"packageserver-d55dfcdfc-sk4xx\" (UID: \"550aea27-f732-47cb-9c04-93edb6216ae0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328607 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/66ac15af-4bac-4898-a79d-3481df21bd0e-srv-cert\") pod \"olm-operator-6b444d44fb-28g6q\" (UID: \"66ac15af-4bac-4898-a79d-3481df21bd0e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328655 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4666640-fb87-46f0-8731-f9946f1f7470-serving-cert\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328697 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rckqf\" (UniqueName: \"kubernetes.io/projected/ef794854-78b3-48a6-8fa0-3d2bd613ecd2-kube-api-access-rckqf\") pod \"multus-admission-controller-857f4d67dd-jfj9x\" (UID: \"ef794854-78b3-48a6-8fa0-3d2bd613ecd2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jfj9x" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328738 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/550aea27-f732-47cb-9c04-93edb6216ae0-webhook-cert\") pod \"packageserver-d55dfcdfc-sk4xx\" (UID: \"550aea27-f732-47cb-9c04-93edb6216ae0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328776 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e6486e25-f042-42ac-a9e4-8a399bf9b414-metrics-tls\") pod \"dns-default-st8vq\" (UID: \"e6486e25-f042-42ac-a9e4-8a399bf9b414\") " pod="openshift-dns/dns-default-st8vq" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328812 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f90130b9-9551-4823-9397-48b583729552-auth-proxy-config\") pod \"machine-config-operator-74547568cd-4rj4n\" (UID: \"f90130b9-9551-4823-9397-48b583729552\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328859 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9118a944-3b9c-4f67-917a-899581233d1e-auth-proxy-config\") pod \"machine-approver-56656f9798-cpxtg\" (UID: \"9118a944-3b9c-4f67-917a-899581233d1e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328899 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b426c32f-f8fd-4019-af68-7d5febadde67-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dgbsb\" (UID: \"b426c32f-f8fd-4019-af68-7d5febadde67\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.328965 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vw95c\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329019 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-socket-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329061 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fb91f88b-4fc5-464d-ac96-88b0828e67c0-service-ca-bundle\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329102 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2faf43c0-49a3-41f7-a278-180d44c03689-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4n8bt\" (UID: \"2faf43c0-49a3-41f7-a278-180d44c03689\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329141 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9118a944-3b9c-4f67-917a-899581233d1e-config\") pod \"machine-approver-56656f9798-cpxtg\" (UID: \"9118a944-3b9c-4f67-917a-899581233d1e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329179 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/66ac15af-4bac-4898-a79d-3481df21bd0e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-28g6q\" (UID: \"66ac15af-4bac-4898-a79d-3481df21bd0e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329242 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc4gd\" (UniqueName: \"kubernetes.io/projected/c4234d88-5b46-45ed-8a17-ac586277459f-kube-api-access-hc4gd\") pod \"machine-config-controller-84d6567774-qhxrz\" (UID: \"c4234d88-5b46-45ed-8a17-ac586277459f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329296 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/550aea27-f732-47cb-9c04-93edb6216ae0-apiservice-cert\") pod \"packageserver-d55dfcdfc-sk4xx\" (UID: \"550aea27-f732-47cb-9c04-93edb6216ae0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329366 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c82qh\" (UniqueName: \"kubernetes.io/projected/63606771-b004-4903-a2a1-d5032a0fa94b-kube-api-access-c82qh\") pod \"control-plane-machine-set-operator-78cbb6b69f-ppcvh\" (UID: \"63606771-b004-4903-a2a1-d5032a0fa94b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329418 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npdjz\" (UniqueName: \"kubernetes.io/projected/29524d66-9853-4dde-9ba5-9e124a055dd2-kube-api-access-npdjz\") pod \"service-ca-operator-777779d784-tvt4t\" (UID: \"29524d66-9853-4dde-9ba5-9e124a055dd2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329479 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f90130b9-9551-4823-9397-48b583729552-images\") pod \"machine-config-operator-74547568cd-4rj4n\" (UID: \"f90130b9-9551-4823-9397-48b583729552\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329522 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/cc097706-3829-4b7b-9047-97a52667c825-signing-key\") pod \"service-ca-9c57cc56f-zvvg4\" (UID: \"cc097706-3829-4b7b-9047-97a52667c825\") " pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329575 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7b8c\" (UniqueName: \"kubernetes.io/projected/f90130b9-9551-4823-9397-48b583729552-kube-api-access-w7b8c\") pod \"machine-config-operator-74547568cd-4rj4n\" (UID: \"f90130b9-9551-4823-9397-48b583729552\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329750 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c4666640-fb87-46f0-8731-f9946f1f7470-etcd-ca\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329828 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/de02b605-6b0f-476b-9df3-fd41e3a320a2-socket-dir\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329853 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11220b25-0e89-43b7-95ef-4385c047753e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmnw2\" (UID: \"11220b25-0e89-43b7-95ef-4385c047753e\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329922 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/fb91f88b-4fc5-464d-ac96-88b0828e67c0-stats-auth\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.329960 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2faf43c0-49a3-41f7-a278-180d44c03689-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4n8bt\" (UID: \"2faf43c0-49a3-41f7-a278-180d44c03689\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.330016 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.330057 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wr9x\" (UniqueName: \"kubernetes.io/projected/3d7c5acd-fc0a-4c18-8b24-59227b10369f-kube-api-access-2wr9x\") pod \"kube-storage-version-migrator-operator-b67b599dd-4m85s\" (UID: \"3d7c5acd-fc0a-4c18-8b24-59227b10369f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.330094 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc84p\" (UniqueName: \"kubernetes.io/projected/773d34d4-1723-42c4-8b83-43f629b630c2-kube-api-access-qc84p\") pod \"collect-profiles-29408085-x6l8j\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.330136 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d7c5acd-fc0a-4c18-8b24-59227b10369f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-4m85s\" (UID: \"3d7c5acd-fc0a-4c18-8b24-59227b10369f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.330173 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/63606771-b004-4903-a2a1-d5032a0fa94b-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ppcvh\" (UID: \"63606771-b004-4903-a2a1-d5032a0fa94b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.330210 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0cc75551-8b40-4ae7-a917-7aaa202313c4-srv-cert\") pod 
\"catalog-operator-68c6474976-g9klz\" (UID: \"0cc75551-8b40-4ae7-a917-7aaa202313c4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.330248 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0cffa0e5-cf3c-450b-b289-d52fe242ad11-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mvbfz\" (UID: \"0cffa0e5-cf3c-450b-b289-d52fe242ad11\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.330302 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtfgm\" (UniqueName: \"kubernetes.io/projected/66ac15af-4bac-4898-a79d-3481df21bd0e-kube-api-access-gtfgm\") pod \"olm-operator-6b444d44fb-28g6q\" (UID: \"66ac15af-4bac-4898-a79d-3481df21bd0e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.330431 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-certs\") pod \"machine-config-server-hcx8m\" (UID: \"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac\") " pod="openshift-machine-config-operator/machine-config-server-hcx8m" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.330511 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f90130b9-9551-4823-9397-48b583729552-auth-proxy-config\") pod \"machine-config-operator-74547568cd-4rj4n\" (UID: \"f90130b9-9551-4823-9397-48b583729552\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.330795 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/550aea27-f732-47cb-9c04-93edb6216ae0-tmpfs\") pod \"packageserver-d55dfcdfc-sk4xx\" (UID: \"550aea27-f732-47cb-9c04-93edb6216ae0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.330925 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fb91f88b-4fc5-464d-ac96-88b0828e67c0-service-ca-bundle\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.331193 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:45.831166436 +0000 UTC m=+146.599338085 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.331995 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2faf43c0-49a3-41f7-a278-180d44c03689-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-4n8bt\" (UID: \"2faf43c0-49a3-41f7-a278-180d44c03689\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.334775 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4666640-fb87-46f0-8731-f9946f1f7470-serving-cert\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.335385 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.335950 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c4666640-fb87-46f0-8731-f9946f1f7470-etcd-client\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.336832 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/fb91f88b-4fc5-464d-ac96-88b0828e67c0-default-certificate\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.337261 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2faf43c0-49a3-41f7-a278-180d44c03689-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-4n8bt\" (UID: \"2faf43c0-49a3-41f7-a278-180d44c03689\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.337572 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fb91f88b-4fc5-464d-ac96-88b0828e67c0-metrics-certs\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.337789 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/fb91f88b-4fc5-464d-ac96-88b0828e67c0-stats-auth\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.339119 4941 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11220b25-0e89-43b7-95ef-4385c047753e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmnw2\" (UID: \"11220b25-0e89-43b7-95ef-4385c047753e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.352596 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.359981 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4666640-fb87-46f0-8731-f9946f1f7470-config\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.372828 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.382286 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c4666640-fb87-46f0-8731-f9946f1f7470-etcd-ca\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.393764 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.412736 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.432095 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.432446 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:45.93240397 +0000 UTC m=+146.700575619 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.432899 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.433780 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.433940 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:45.933918327 +0000 UTC m=+146.702089976 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.447094 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0cffa0e5-cf3c-450b-b289-d52fe242ad11-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mvbfz\" (UID: \"0cffa0e5-cf3c-450b-b289-d52fe242ad11\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.452801 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.460237 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0cffa0e5-cf3c-450b-b289-d52fe242ad11-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mvbfz\" (UID: \"0cffa0e5-cf3c-450b-b289-d52fe242ad11\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.473106 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.493468 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.512896 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.533299 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.535631 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.535970 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.035903905 +0000 UTC m=+146.804075584 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.536248 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.537007 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.036974479 +0000 UTC m=+146.805146128 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.543883 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bf402eed-bc58-4a99-ab0c-bd75b072ebf9-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rxb6\" (UID: \"bf402eed-bc58-4a99-ab0c-bd75b072ebf9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.552935 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.559738 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf402eed-bc58-4a99-ab0c-bd75b072ebf9-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rxb6\" (UID: \"bf402eed-bc58-4a99-ab0c-bd75b072ebf9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.573665 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.593697 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.600945 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9118a944-3b9c-4f67-917a-899581233d1e-auth-proxy-config\") pod \"machine-approver-56656f9798-cpxtg\" (UID: \"9118a944-3b9c-4f67-917a-899581233d1e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.614314 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.622768 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9118a944-3b9c-4f67-917a-899581233d1e-config\") pod \"machine-approver-56656f9798-cpxtg\" (UID: \"9118a944-3b9c-4f67-917a-899581233d1e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.633461 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.639172 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.639388 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.139319567 +0000 UTC m=+146.907491216 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.640392 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.641302 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.141272638 +0000 UTC m=+146.909444287 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.653577 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.673017 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.683896 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/9118a944-3b9c-4f67-917a-899581233d1e-machine-approver-tls\") pod \"machine-approver-56656f9798-cpxtg\" (UID: \"9118a944-3b9c-4f67-917a-899581233d1e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.720539 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cft2r\" (UniqueName: \"kubernetes.io/projected/351c18c6-1ef2-4555-a9cd-03b9a8b342a4-kube-api-access-cft2r\") pod \"authentication-operator-69f744f599-pkrwj\" (UID: \"351c18c6-1ef2-4555-a9cd-03b9a8b342a4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.741284 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sknc\" (UniqueName: \"kubernetes.io/projected/94a81839-4806-4112-8c30-fb70049d8296-kube-api-access-9sknc\") pod \"openshift-config-operator-7777fb866f-526zm\" (UID: \"94a81839-4806-4112-8c30-fb70049d8296\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.742799 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.743202 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.243155183 +0000 UTC m=+147.011326832 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.743979 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.744403 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.244388421 +0000 UTC m=+147.012560030 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.749661 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4md6\" (UniqueName: \"kubernetes.io/projected/179be37e-b722-4f7b-ab6a-88fb1407c2e7-kube-api-access-t4md6\") pod \"apiserver-76f77b778f-frzl6\" (UID: \"179be37e-b722-4f7b-ab6a-88fb1407c2e7\") " pod="openshift-apiserver/apiserver-76f77b778f-frzl6"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.767653 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z58l6\" (UniqueName: \"kubernetes.io/projected/6c62f053-996e-44b6-9a65-3d7f292b6cef-kube-api-access-z58l6\") pod \"oauth-openshift-558db77b4-xrszt\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrszt"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.789822 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.789853 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s96gj\" (UniqueName: \"kubernetes.io/projected/f38e9faf-5fe2-49c8-8516-2b5f2766199e-kube-api-access-s96gj\") pod \"apiserver-7bbb656c7d-rb8kg\" (UID: \"f38e9faf-5fe2-49c8-8516-2b5f2766199e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.790388 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.833644 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.840749 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6krj\" (UniqueName: \"kubernetes.io/projected/843edada-6eb9-46da-ba98-05ccfcd4cb1b-kube-api-access-x6krj\") pod \"controller-manager-879f6c89f-vhvpc\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.846205 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.846582 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.346542284 +0000 UTC m=+147.114713933 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.847589 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.848270 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.348238867 +0000 UTC m=+147.116410486 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.853356 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.873945 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.876658 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ef794854-78b3-48a6-8fa0-3d2bd613ecd2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-jfj9x\" (UID: \"ef794854-78b3-48a6-8fa0-3d2bd613ecd2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jfj9x"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.876990 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-frzl6"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.893056 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.908144 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d7c5acd-fc0a-4c18-8b24-59227b10369f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-4m85s\" (UID: \"3d7c5acd-fc0a-4c18-8b24-59227b10369f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.913187 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.920774 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d7c5acd-fc0a-4c18-8b24-59227b10369f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-4m85s\" (UID: \"3d7c5acd-fc0a-4c18-8b24-59227b10369f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.921509 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.933481 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.949212 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.949406 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.449379248 +0000 UTC m=+147.217550867 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.950175 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:45 crc kubenswrapper[4941]: E1130 06:48:45.950845 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.450781192 +0000 UTC m=+147.218952811 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.952689 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.957318 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt"
Nov 30 06:48:45 crc kubenswrapper[4941]: I1130 06:48:45.973373 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.021149 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqv8f\" (UniqueName: \"kubernetes.io/projected/465a2899-647c-4144-8810-46a4a4e49909-kube-api-access-nqv8f\") pod \"machine-api-operator-5694c8668f-mt5dk\" (UID: \"465a2899-647c-4144-8810-46a4a4e49909\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.040867 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7q5m\" (UniqueName: \"kubernetes.io/projected/ab7371ad-5001-4223-84f1-c7d55ce3c5f3-kube-api-access-d7q5m\") pod \"dns-operator-744455d44c-4qn2t\" (UID: \"ab7371ad-5001-4223-84f1-c7d55ce3c5f3\") " pod="openshift-dns-operator/dns-operator-744455d44c-4qn2t"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.049561 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-4qn2t"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.051526 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.051637 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.551612034 +0000 UTC m=+147.319783653 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.052070 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.052521 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.552509301 +0000 UTC m=+147.320680920 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.053162 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.062834 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzvtl\" (UniqueName: \"kubernetes.io/projected/8f1969d8-c7f9-4c39-9727-e27a26020d46-kube-api-access-bzvtl\") pod \"console-operator-58897d9998-gjx4m\" (UID: \"8f1969d8-c7f9-4c39-9727-e27a26020d46\") " pod="openshift-console-operator/console-operator-58897d9998-gjx4m"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.074168 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.085512 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2hstq\" (UID: \"7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.097027 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.099275 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73-config\") pod \"kube-controller-manager-operator-78b949d7b-2hstq\" (UID: \"7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.104194 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.110952 4941 request.go:700] Waited for 1.016999956s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-service-ca/secrets?fieldSelector=metadata.name%3Dservice-ca-dockercfg-pn86c&limit=500&resourceVersion=0
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.113841 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.138402 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.141993 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.144514 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-gjx4m"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.153623 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.157268 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.157543 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.657500263 +0000 UTC m=+147.425671882 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.158107 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.158587 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.658568637 +0000 UTC m=+147.426740256 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.173117 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.180183 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/cc097706-3829-4b7b-9047-97a52667c825-signing-cabundle\") pod \"service-ca-9c57cc56f-zvvg4\" (UID: \"cc097706-3829-4b7b-9047-97a52667c825\") " pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.195510 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.208190 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/cc097706-3829-4b7b-9047-97a52667c825-signing-key\") pod \"service-ca-9c57cc56f-zvvg4\" (UID: \"cc097706-3829-4b7b-9047-97a52667c825\") " pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.213305 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.239916 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.257683 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.260019 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.260942 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.760927446 +0000 UTC m=+147.529099055 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.261963 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f90130b9-9551-4823-9397-48b583729552-images\") pod \"machine-config-operator-74547568cd-4rj4n\" (UID: \"f90130b9-9551-4823-9397-48b583729552\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.264823 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f90130b9-9551-4823-9397-48b583729552-proxy-tls\") pod \"machine-config-operator-74547568cd-4rj4n\" (UID: \"f90130b9-9551-4823-9397-48b583729552\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.277254 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.298856 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.303992 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/550aea27-f732-47cb-9c04-93edb6216ae0-webhook-cert\") pod \"packageserver-d55dfcdfc-sk4xx\" (UID: \"550aea27-f732-47cb-9c04-93edb6216ae0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.309794 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/550aea27-f732-47cb-9c04-93edb6216ae0-apiservice-cert\") pod \"packageserver-d55dfcdfc-sk4xx\" (UID: \"550aea27-f732-47cb-9c04-93edb6216ae0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.313366 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.326287 4941 secret.go:188] Couldn't get secret openshift-machine-config-operator/mcc-proxy-tls: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.326408 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c4234d88-5b46-45ed-8a17-ac586277459f-proxy-tls podName:c4234d88-5b46-45ed-8a17-ac586277459f nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.826380022 +0000 UTC m=+147.594551631 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/c4234d88-5b46-45ed-8a17-ac586277459f-proxy-tls") pod "machine-config-controller-84d6567774-qhxrz" (UID: "c4234d88-5b46-45ed-8a17-ac586277459f") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.326520 4941 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/pprof-cert: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.326625 4941 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.326669 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/773d34d4-1723-42c4-8b83-43f629b630c2-secret-volume podName:773d34d4-1723-42c4-8b83-43f629b630c2 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.826596729 +0000 UTC m=+147.594768338 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "secret-volume" (UniqueName: "kubernetes.io/secret/773d34d4-1723-42c4-8b83-43f629b630c2-secret-volume") pod "collect-profiles-29408085-x6l8j" (UID: "773d34d4-1723-42c4-8b83-43f629b630c2") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.326725 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-node-bootstrap-token podName:ad1e76c8-6a3f-4012-ab00-ebaba2e536ac nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.826693671 +0000 UTC m=+147.594865320 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-node-bootstrap-token") pod "machine-config-server-hcx8m" (UID: "ad1e76c8-6a3f-4012-ab00-ebaba2e536ac") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.327454 4941 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.327523 4941 configmap.go:193] Couldn't get configMap openshift-dns/dns-default: failed to sync configmap cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.327572 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e6486e25-f042-42ac-a9e4-8a399bf9b414-config-volume podName:e6486e25-f042-42ac-a9e4-8a399bf9b414 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.827562438 +0000 UTC m=+147.595734047 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/e6486e25-f042-42ac-a9e4-8a399bf9b414-config-volume") pod "dns-default-st8vq" (UID: "e6486e25-f042-42ac-a9e4-8a399bf9b414") : failed to sync configmap cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.327589 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6b3b1c30-7677-4bff-85a6-80dd7a3548b0-cert podName:6b3b1c30-7677-4bff-85a6-80dd7a3548b0 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.827580859 +0000 UTC m=+147.595752468 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6b3b1c30-7677-4bff-85a6-80dd7a3548b0-cert") pod "ingress-canary-57xxf" (UID: "6b3b1c30-7677-4bff-85a6-80dd7a3548b0") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.327648 4941 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.327703 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/29524d66-9853-4dde-9ba5-9e124a055dd2-config podName:29524d66-9853-4dde-9ba5-9e124a055dd2 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.827695872 +0000 UTC m=+147.595867481 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/29524d66-9853-4dde-9ba5-9e124a055dd2-config") pod "service-ca-operator-777779d784-tvt4t" (UID: "29524d66-9853-4dde-9ba5-9e124a055dd2") : failed to sync configmap cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.327845 4941 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/pprof-cert: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.327864 4941 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.327881 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0cc75551-8b40-4ae7-a917-7aaa202313c4-profile-collector-cert podName:0cc75551-8b40-4ae7-a917-7aaa202313c4 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.827874598 +0000 UTC m=+147.596046207 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "profile-collector-cert" (UniqueName: "kubernetes.io/secret/0cc75551-8b40-4ae7-a917-7aaa202313c4-profile-collector-cert") pod "catalog-operator-68c6474976-g9klz" (UID: "0cc75551-8b40-4ae7-a917-7aaa202313c4") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.327896 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/29524d66-9853-4dde-9ba5-9e124a055dd2-serving-cert podName:29524d66-9853-4dde-9ba5-9e124a055dd2 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.827889118 +0000 UTC m=+147.596060727 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/29524d66-9853-4dde-9ba5-9e124a055dd2-serving-cert") pod "service-ca-operator-777779d784-tvt4t" (UID: "29524d66-9853-4dde-9ba5-9e124a055dd2") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.327899 4941 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.327962 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-operator-metrics podName:43fead9f-0d7a-4d82-8822-b4e83849d4ad nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.82795207 +0000 UTC m=+147.596123679 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-operator-metrics") pod "marketplace-operator-79b997595-vw95c" (UID: "43fead9f-0d7a-4d82-8822-b4e83849d4ad") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.328738 4941 configmap.go:193] Couldn't get configMap openshift-operator-lifecycle-manager/collect-profiles-config: failed to sync configmap cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.328810 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/773d34d4-1723-42c4-8b83-43f629b630c2-config-volume podName:773d34d4-1723-42c4-8b83-43f629b630c2 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.828795067 +0000 UTC m=+147.596966756 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/773d34d4-1723-42c4-8b83-43f629b630c2-config-volume") pod "collect-profiles-29408085-x6l8j" (UID: "773d34d4-1723-42c4-8b83-43f629b630c2") : failed to sync configmap cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.329799 4941 secret.go:188] Couldn't get secret openshift-dns/dns-default-metrics-tls: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.329850 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6486e25-f042-42ac-a9e4-8a399bf9b414-metrics-tls podName:e6486e25-f042-42ac-a9e4-8a399bf9b414 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.829837309 +0000 UTC m=+147.598009028 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/e6486e25-f042-42ac-a9e4-8a399bf9b414-metrics-tls") pod "dns-default-st8vq" (UID: "e6486e25-f042-42ac-a9e4-8a399bf9b414") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.329876 4941 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/package-server-manager-serving-cert: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.329904 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b426c32f-f8fd-4019-af68-7d5febadde67-package-server-manager-serving-cert podName:b426c32f-f8fd-4019-af68-7d5febadde67 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.829896781 +0000 UTC m=+147.598068610 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "package-server-manager-serving-cert" (UniqueName: "kubernetes.io/secret/b426c32f-f8fd-4019-af68-7d5febadde67-package-server-manager-serving-cert") pod "package-server-manager-789f6589d5-dgbsb" (UID: "b426c32f-f8fd-4019-af68-7d5febadde67") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.329938 4941 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.330007 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-trusted-ca podName:43fead9f-0d7a-4d82-8822-b4e83849d4ad nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.829983804 +0000 UTC m=+147.598155613 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-trusted-ca") pod "marketplace-operator-79b997595-vw95c" (UID: "43fead9f-0d7a-4d82-8822-b4e83849d4ad") : failed to sync configmap cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.331270 4941 secret.go:188] Couldn't get secret openshift-machine-api/control-plane-machine-set-operator-tls: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.331346 4941 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.331380 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/63606771-b004-4903-a2a1-d5032a0fa94b-control-plane-machine-set-operator-tls podName:63606771-b004-4903-a2a1-d5032a0fa94b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.831368347 +0000 UTC m=+147.599540056 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "control-plane-machine-set-operator-tls" (UniqueName: "kubernetes.io/secret/63606771-b004-4903-a2a1-d5032a0fa94b-control-plane-machine-set-operator-tls") pod "control-plane-machine-set-operator-78cbb6b69f-ppcvh" (UID: "63606771-b004-4903-a2a1-d5032a0fa94b") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.331413 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-certs podName:ad1e76c8-6a3f-4012-ab00-ebaba2e536ac nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.831398668 +0000 UTC m=+147.599570387 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-certs") pod "machine-config-server-hcx8m" (UID: "ad1e76c8-6a3f-4012-ab00-ebaba2e536ac") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.331417 4941 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/pprof-cert: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.331450 4941 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.331459 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/66ac15af-4bac-4898-a79d-3481df21bd0e-profile-collector-cert podName:66ac15af-4bac-4898-a79d-3481df21bd0e nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.8314492 +0000 UTC m=+147.599621009 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "profile-collector-cert" (UniqueName: "kubernetes.io/secret/66ac15af-4bac-4898-a79d-3481df21bd0e-profile-collector-cert") pod "olm-operator-6b444d44fb-28g6q" (UID: "66ac15af-4bac-4898-a79d-3481df21bd0e") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.331490 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/66ac15af-4bac-4898-a79d-3481df21bd0e-srv-cert podName:66ac15af-4bac-4898-a79d-3481df21bd0e nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.831481921 +0000 UTC m=+147.599653760 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/66ac15af-4bac-4898-a79d-3481df21bd0e-srv-cert") pod "olm-operator-6b444d44fb-28g6q" (UID: "66ac15af-4bac-4898-a79d-3481df21bd0e") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.331578 4941 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/catalog-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.331623 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0cc75551-8b40-4ae7-a917-7aaa202313c4-srv-cert podName:0cc75551-8b40-4ae7-a917-7aaa202313c4 nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.831610935 +0000 UTC m=+147.599782544 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/0cc75551-8b40-4ae7-a917-7aaa202313c4-srv-cert") pod "catalog-operator-68c6474976-g9klz" (UID: "0cc75551-8b40-4ae7-a917-7aaa202313c4") : failed to sync secret cache: timed out waiting for the condition
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.333701 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.338970 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-frzl6"]
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.355579 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.362523 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.362977 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.862959605 +0000 UTC m=+147.631131214 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.372068 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.393131 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.416069 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.434055 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.453234 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.464492 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.465253 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:46.965233071 +0000 UTC m=+147.733404680 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.473880 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.492942 4941 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.513197 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.533255 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.552933 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.566879 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.567065 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.567277 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.567318 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.567400 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.567968 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.067942491 +0000 UTC m=+147.836114100 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.568299 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.572180 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.573145 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.573650 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.575590 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.593081 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.612950 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.632696 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.639347 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.654611 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.673875 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.674283 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.174232873 +0000 UTC m=+147.942404482 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.675243 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.675633 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xrszt"]
Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.675848 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.175831344 +0000 UTC m=+147.944002953 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.676003 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.693212 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.695405 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-526zm"] Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.697646 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg"] Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.697704 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4qn2t"] Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.725197 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.725217 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-pkrwj"] Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.745778 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.746438 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.752121 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.752404 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vhvpc"] Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.759134 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.771920 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-mt5dk"] Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.773622 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-gjx4m"] Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.774571 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.776029 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.776292 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.276166539 +0000 UTC m=+148.044338158 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.776362 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.776747 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.276730867 +0000 UTC m=+148.044902486 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.805053 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.813514 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.833388 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.852970 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.873717 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.877874 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878133 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/773d34d4-1723-42c4-8b83-43f629b630c2-secret-volume\") pod \"collect-profiles-29408085-x6l8j\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878158 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-node-bootstrap-token\") pod \"machine-config-server-hcx8m\" (UID: \"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac\") " pod="openshift-machine-config-operator/machine-config-server-hcx8m" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878213 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6486e25-f042-42ac-a9e4-8a399bf9b414-config-volume\") pod \"dns-default-st8vq\" (UID: \"e6486e25-f042-42ac-a9e4-8a399bf9b414\") " pod="openshift-dns/dns-default-st8vq" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878231 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6b3b1c30-7677-4bff-85a6-80dd7a3548b0-cert\") pod \"ingress-canary-57xxf\" (UID: \"6b3b1c30-7677-4bff-85a6-80dd7a3548b0\") " pod="openshift-ingress-canary/ingress-canary-57xxf" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878348 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/29524d66-9853-4dde-9ba5-9e124a055dd2-config\") pod \"service-ca-operator-777779d784-tvt4t\" (UID: \"29524d66-9853-4dde-9ba5-9e124a055dd2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878382 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29524d66-9853-4dde-9ba5-9e124a055dd2-serving-cert\") pod \"service-ca-operator-777779d784-tvt4t\" (UID: \"29524d66-9853-4dde-9ba5-9e124a055dd2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878398 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0cc75551-8b40-4ae7-a917-7aaa202313c4-profile-collector-cert\") pod \"catalog-operator-68c6474976-g9klz\" (UID: \"0cc75551-8b40-4ae7-a917-7aaa202313c4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878435 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vw95c\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878508 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/773d34d4-1723-42c4-8b83-43f629b630c2-config-volume\") pod \"collect-profiles-29408085-x6l8j\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878537 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/66ac15af-4bac-4898-a79d-3481df21bd0e-srv-cert\") pod \"olm-operator-6b444d44fb-28g6q\" (UID: \"66ac15af-4bac-4898-a79d-3481df21bd0e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878579 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b426c32f-f8fd-4019-af68-7d5febadde67-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dgbsb\" (UID: \"b426c32f-f8fd-4019-af68-7d5febadde67\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878595 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e6486e25-f042-42ac-a9e4-8a399bf9b414-metrics-tls\") pod \"dns-default-st8vq\" (UID: \"e6486e25-f042-42ac-a9e4-8a399bf9b414\") " pod="openshift-dns/dns-default-st8vq" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878617 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vw95c\" (UID: 
\"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878659 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/66ac15af-4bac-4898-a79d-3481df21bd0e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-28g6q\" (UID: \"66ac15af-4bac-4898-a79d-3481df21bd0e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878760 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/63606771-b004-4903-a2a1-d5032a0fa94b-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ppcvh\" (UID: \"63606771-b004-4903-a2a1-d5032a0fa94b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878779 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0cc75551-8b40-4ae7-a917-7aaa202313c4-srv-cert\") pod \"catalog-operator-68c6474976-g9klz\" (UID: \"0cc75551-8b40-4ae7-a917-7aaa202313c4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878818 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-certs\") pod \"machine-config-server-hcx8m\" (UID: \"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac\") " pod="openshift-machine-config-operator/machine-config-server-hcx8m" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.878864 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c4234d88-5b46-45ed-8a17-ac586277459f-proxy-tls\") pod \"machine-config-controller-84d6567774-qhxrz\" (UID: \"c4234d88-5b46-45ed-8a17-ac586277459f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.879047 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.379007833 +0000 UTC m=+148.147179442 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.880855 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29524d66-9853-4dde-9ba5-9e124a055dd2-config\") pod \"service-ca-operator-777779d784-tvt4t\" (UID: \"29524d66-9853-4dde-9ba5-9e124a055dd2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.881028 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/773d34d4-1723-42c4-8b83-43f629b630c2-config-volume\") pod \"collect-profiles-29408085-x6l8j\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.884459 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vw95c\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.885486 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vw95c\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.887456 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b426c32f-f8fd-4019-af68-7d5febadde67-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-dgbsb\" (UID: \"b426c32f-f8fd-4019-af68-7d5febadde67\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.892682 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-certs\") pod \"machine-config-server-hcx8m\" (UID: \"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac\") " pod="openshift-machine-config-operator/machine-config-server-hcx8m" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.893547 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.894167 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29524d66-9853-4dde-9ba5-9e124a055dd2-serving-cert\") pod \"service-ca-operator-777779d784-tvt4t\" (UID: \"29524d66-9853-4dde-9ba5-9e124a055dd2\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.895772 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/66ac15af-4bac-4898-a79d-3481df21bd0e-srv-cert\") pod \"olm-operator-6b444d44fb-28g6q\" (UID: \"66ac15af-4bac-4898-a79d-3481df21bd0e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.896884 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0cc75551-8b40-4ae7-a917-7aaa202313c4-srv-cert\") pod \"catalog-operator-68c6474976-g9klz\" (UID: \"0cc75551-8b40-4ae7-a917-7aaa202313c4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.897527 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/63606771-b004-4903-a2a1-d5032a0fa94b-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ppcvh\" (UID: \"63606771-b004-4903-a2a1-d5032a0fa94b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.898523 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0cc75551-8b40-4ae7-a917-7aaa202313c4-profile-collector-cert\") pod \"catalog-operator-68c6474976-g9klz\" (UID: \"0cc75551-8b40-4ae7-a917-7aaa202313c4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.901290 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c4234d88-5b46-45ed-8a17-ac586277459f-proxy-tls\") pod \"machine-config-controller-84d6567774-qhxrz\" (UID: \"c4234d88-5b46-45ed-8a17-ac586277459f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.906990 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/66ac15af-4bac-4898-a79d-3481df21bd0e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-28g6q\" (UID: \"66ac15af-4bac-4898-a79d-3481df21bd0e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.908583 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/773d34d4-1723-42c4-8b83-43f629b630c2-secret-volume\") pod \"collect-profiles-29408085-x6l8j\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.915066 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.928976 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-node-bootstrap-token\") pod \"machine-config-server-hcx8m\" (UID: 
\"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac\") " pod="openshift-machine-config-operator/machine-config-server-hcx8m" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.935592 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.959019 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.974991 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.977123 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6b3b1c30-7677-4bff-85a6-80dd7a3548b0-cert\") pod \"ingress-canary-57xxf\" (UID: \"6b3b1c30-7677-4bff-85a6-80dd7a3548b0\") " pod="openshift-ingress-canary/ingress-canary-57xxf" Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.980628 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:46 crc kubenswrapper[4941]: E1130 06:48:46.981071 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.481054273 +0000 UTC m=+148.249225882 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:46 crc kubenswrapper[4941]: I1130 06:48:46.993815 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.014503 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 30 06:48:47 crc kubenswrapper[4941]: W1130 06:48:47.018075 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-852a62267be4b5ce40a9b757d8bd4d694d055572171d8c13a0ce6bec0035bf62 WatchSource:0}: Error finding container 852a62267be4b5ce40a9b757d8bd4d694d055572171d8c13a0ce6bec0035bf62: Status 404 returned error can't find the container with id 852a62267be4b5ce40a9b757d8bd4d694d055572171d8c13a0ce6bec0035bf62 Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.026832 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e6486e25-f042-42ac-a9e4-8a399bf9b414-metrics-tls\") pod \"dns-default-st8vq\" (UID: \"e6486e25-f042-42ac-a9e4-8a399bf9b414\") " pod="openshift-dns/dns-default-st8vq" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.041147 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.054896 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.060625 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e6486e25-f042-42ac-a9e4-8a399bf9b414-config-volume\") pod \"dns-default-st8vq\" (UID: \"e6486e25-f042-42ac-a9e4-8a399bf9b414\") " pod="openshift-dns/dns-default-st8vq" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.084603 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:47 crc kubenswrapper[4941]: E1130 06:48:47.084827 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.584773625 +0000 UTC m=+148.352945574 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.085185 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:47 crc kubenswrapper[4941]: E1130 06:48:47.086212 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.586197289 +0000 UTC m=+148.354368888 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.101971 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0d47c312-eb2e-4f83-8d9d-bd30bfd989dd-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jxshm\" (UID: \"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.108140 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24xhs\" (UniqueName: \"kubernetes.io/projected/5ccfee3c-1083-4a51-a25b-4678f31d3a51-kube-api-access-24xhs\") pod \"route-controller-manager-6576b87f9c-hzwfd\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.111153 4941 request.go:700] Waited for 1.90017875s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-operator/serviceaccounts/ingress-operator/token Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.133651 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4425t\" (UniqueName: \"kubernetes.io/projected/0d47c312-eb2e-4f83-8d9d-bd30bfd989dd-kube-api-access-4425t\") pod \"ingress-operator-5b745b69d9-jxshm\" (UID: \"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.148750 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8ngh\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-kube-api-access-x8ngh\") 
pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.176555 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l62tx\" (UniqueName: \"kubernetes.io/projected/542c0442-1fb2-4e3e-ba06-a14526cf98ce-kube-api-access-l62tx\") pod \"downloads-7954f5f757-vkq5m\" (UID: \"542c0442-1fb2-4e3e-ba06-a14526cf98ce\") " pod="openshift-console/downloads-7954f5f757-vkq5m" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.186161 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:47 crc kubenswrapper[4941]: E1130 06:48:47.186696 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.686674629 +0000 UTC m=+148.454846228 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.192352 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7vtg\" (UniqueName: \"kubernetes.io/projected/df9aa967-eec9-4ce5-9c64-edff3aedca4a-kube-api-access-c7vtg\") pod \"console-f9d7485db-css84\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.209035 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5c335028-5b89-477e-9bf1-e76f8f249d1a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9bcts\" (UID: \"5c335028-5b89-477e-9bf1-e76f8f249d1a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.215887 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.229691 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.229729 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-vkq5m" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.234113 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-bound-sa-token\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.235588 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.244088 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.251736 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpq8s\" (UniqueName: \"kubernetes.io/projected/abdfc490-9871-4dd3-84b3-5f446f68f102-kube-api-access-gpq8s\") pod \"cluster-samples-operator-665b6dd947-hzh6t\" (UID: \"abdfc490-9871-4dd3-84b3-5f446f68f102\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.269563 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.272281 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtxz6\" (UniqueName: \"kubernetes.io/projected/9118a944-3b9c-4f67-917a-899581233d1e-kube-api-access-dtxz6\") pod \"machine-approver-56656f9798-cpxtg\" (UID: \"9118a944-3b9c-4f67-917a-899581233d1e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.289155 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:47 crc kubenswrapper[4941]: E1130 06:48:47.289514 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.789495854 +0000 UTC m=+148.557667463 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.292311 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94pxb\" (UniqueName: \"kubernetes.io/projected/6b3b1c30-7677-4bff-85a6-80dd7a3548b0-kube-api-access-94pxb\") pod \"ingress-canary-57xxf\" (UID: \"6b3b1c30-7677-4bff-85a6-80dd7a3548b0\") " pod="openshift-ingress-canary/ingress-canary-57xxf" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.305674 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.312901 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m78vr\" (UniqueName: \"kubernetes.io/projected/59417585-2120-43f8-a16f-dc80dc6d1fd0-kube-api-access-m78vr\") pod \"migrator-59844c95c7-rw92q\" (UID: \"59417585-2120-43f8-a16f-dc80dc6d1fd0\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rw92q" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.316911 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"b68b85e481d3d114de6d704da65ff3e7210acf8f76cf0479749dd7d28a58615f"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.318758 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-gjx4m" event={"ID":"8f1969d8-c7f9-4c39-9727-e27a26020d46","Type":"ContainerStarted","Data":"fa4ecdabbac0f21867f0445f628e1d6c5a2dcea20b46bd3b02cfb7d7e5c99b0d"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.318808 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-gjx4m" event={"ID":"8f1969d8-c7f9-4c39-9727-e27a26020d46","Type":"ContainerStarted","Data":"2db163089b67d01128d1ba81cc2660821f72e25ea1dfc67896f182f288b79479"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.319913 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.321422 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" event={"ID":"351c18c6-1ef2-4555-a9cd-03b9a8b342a4","Type":"ContainerStarted","Data":"6ef1aba167a9778fdd93abfd0bf2f1e5188e101b58c5e34bc339ec599e682d42"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.321444 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" event={"ID":"351c18c6-1ef2-4555-a9cd-03b9a8b342a4","Type":"ContainerStarted","Data":"b2349b41c0537abbaa1e774de0321c0dcdb670b398124a2c35b76d19a4d9eec1"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.324710 4941 patch_prober.go:28] interesting pod/console-operator-58897d9998-gjx4m 
container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/readyz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.324755 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-gjx4m" podUID="8f1969d8-c7f9-4c39-9727-e27a26020d46" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/readyz\": dial tcp 10.217.0.24:8443: connect: connection refused" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.328549 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bf402eed-bc58-4a99-ab0c-bd75b072ebf9-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rxb6\" (UID: \"bf402eed-bc58-4a99-ab0c-bd75b072ebf9\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.354431 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9zxt\" (UniqueName: \"kubernetes.io/projected/550aea27-f732-47cb-9c04-93edb6216ae0-kube-api-access-w9zxt\") pod \"packageserver-d55dfcdfc-sk4xx\" (UID: \"550aea27-f732-47cb-9c04-93edb6216ae0\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.360054 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.374780 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps4vz\" (UniqueName: \"kubernetes.io/projected/0cc75551-8b40-4ae7-a917-7aaa202313c4-kube-api-access-ps4vz\") pod \"catalog-operator-68c6474976-g9klz\" (UID: \"0cc75551-8b40-4ae7-a917-7aaa202313c4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.389311 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmbq4\" (UniqueName: \"kubernetes.io/projected/2faf43c0-49a3-41f7-a278-180d44c03689-kube-api-access-mmbq4\") pod \"cluster-image-registry-operator-dc59b4c8b-4n8bt\" (UID: \"2faf43c0-49a3-41f7-a278-180d44c03689\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.391112 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:47 crc kubenswrapper[4941]: E1130 06:48:47.395081 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.895061513 +0000 UTC m=+148.663233122 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.408049 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" event={"ID":"f38e9faf-5fe2-49c8-8516-2b5f2766199e","Type":"ContainerStarted","Data":"9d4aad0bae6c33a0a406069d733ae775c97f292496cd2a32be919f903affb2b0"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.429535 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" event={"ID":"465a2899-647c-4144-8810-46a4a4e49909","Type":"ContainerStarted","Data":"fa3fb9a817cd98f353c3100a305abb0a1b129928c9a1cc1ee7a30265a0874c93"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.429609 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" event={"ID":"465a2899-647c-4144-8810-46a4a4e49909","Type":"ContainerStarted","Data":"e2364bc88e458200d455ac242d61f2e37553fe1ebd2591824b8dbe6b3ce93e7e"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.431105 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rw92q" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.432934 4941 generic.go:334] "Generic (PLEG): container finished" podID="94a81839-4806-4112-8c30-fb70049d8296" containerID="76d49097fb90123294acfbe6fa1f57589800873ef68c90a6de1c1b0a1ef6a9d0" exitCode=0 Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.433003 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" event={"ID":"94a81839-4806-4112-8c30-fb70049d8296","Type":"ContainerDied","Data":"76d49097fb90123294acfbe6fa1f57589800873ef68c90a6de1c1b0a1ef6a9d0"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.433035 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" event={"ID":"94a81839-4806-4112-8c30-fb70049d8296","Type":"ContainerStarted","Data":"eaa2b1987def53962999c1b1b273fcda36e50544ca56c9a23e0fdd1de0b915de"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.434638 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqktd\" (UniqueName: \"kubernetes.io/projected/11220b25-0e89-43b7-95ef-4385c047753e-kube-api-access-mqktd\") pod \"openshift-controller-manager-operator-756b6f6bc6-fmnw2\" (UID: \"11220b25-0e89-43b7-95ef-4385c047753e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.435215 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grxp7\" (UniqueName: \"kubernetes.io/projected/de02b605-6b0f-476b-9df3-fd41e3a320a2-kube-api-access-grxp7\") pod \"csi-hostpathplugin-46p7c\" (UID: \"de02b605-6b0f-476b-9df3-fd41e3a320a2\") " pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.442721 4941 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" event={"ID":"843edada-6eb9-46da-ba98-05ccfcd4cb1b","Type":"ContainerStarted","Data":"3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.443157 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" event={"ID":"843edada-6eb9-46da-ba98-05ccfcd4cb1b","Type":"ContainerStarted","Data":"7484892378b3e479b528319d4f480cce493c6115f46546e4792b63914081df53"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.443575 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.449539 4941 generic.go:334] "Generic (PLEG): container finished" podID="179be37e-b722-4f7b-ab6a-88fb1407c2e7" containerID="c39adb0de421846f151111154e6b33e16dbfee0df7c1d1c9333f6ed3433f24c8" exitCode=0 Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.450041 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-frzl6" event={"ID":"179be37e-b722-4f7b-ab6a-88fb1407c2e7","Type":"ContainerDied","Data":"c39adb0de421846f151111154e6b33e16dbfee0df7c1d1c9333f6ed3433f24c8"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.450074 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-frzl6" event={"ID":"179be37e-b722-4f7b-ab6a-88fb1407c2e7","Type":"ContainerStarted","Data":"6913d9b75b1431aa44943ddce01864d53a52cd72536bae18b207ac5c74d8e7a1"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.452886 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6gzv\" (UniqueName: \"kubernetes.io/projected/cc097706-3829-4b7b-9047-97a52667c825-kube-api-access-k6gzv\") pod \"service-ca-9c57cc56f-zvvg4\" (UID: \"cc097706-3829-4b7b-9047-97a52667c825\") " pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.454933 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-46p7c" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.458489 4941 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-vhvpc container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.458538 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" podUID="843edada-6eb9-46da-ba98-05ccfcd4cb1b" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.464566 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ce9a5316f00c73921d1ce301364d510e89cc72b4a7fcfd2fa4aaa828ba44be51"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.474479 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" event={"ID":"6c62f053-996e-44b6-9a65-3d7f292b6cef","Type":"ContainerStarted","Data":"4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.474524 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" event={"ID":"6c62f053-996e-44b6-9a65-3d7f292b6cef","Type":"ContainerStarted","Data":"980edae3ab499bf5e801714dfe18278d39944f00ebffacd09ab0d67123b1c1b3"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.475383 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.476456 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv78p\" (UniqueName: \"kubernetes.io/projected/e6486e25-f042-42ac-a9e4-8a399bf9b414-kube-api-access-pv78p\") pod \"dns-default-st8vq\" (UID: \"e6486e25-f042-42ac-a9e4-8a399bf9b414\") " pod="openshift-dns/dns-default-st8vq" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.478535 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"852a62267be4b5ce40a9b757d8bd4d694d055572171d8c13a0ce6bec0035bf62"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.480444 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4qn2t" event={"ID":"ab7371ad-5001-4223-84f1-c7d55ce3c5f3","Type":"ContainerStarted","Data":"cc8db16571de839e7babcc2878aa524d9555bbfc309b22e62ecf5ca73e16469b"} Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.490957 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-57xxf" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.492902 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:47 crc kubenswrapper[4941]: E1130 06:48:47.494880 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:47.994859992 +0000 UTC m=+148.763031601 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.497896 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czwz9\" (UniqueName: \"kubernetes.io/projected/ad1e76c8-6a3f-4012-ab00-ebaba2e536ac-kube-api-access-czwz9\") pod \"machine-config-server-hcx8m\" (UID: \"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac\") " pod="openshift-machine-config-operator/machine-config-server-hcx8m" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.497921 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-st8vq" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.512211 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgcf5\" (UniqueName: \"kubernetes.io/projected/43fead9f-0d7a-4d82-8822-b4e83849d4ad-kube-api-access-tgcf5\") pod \"marketplace-operator-79b997595-vw95c\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.560067 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m95rt\" (UniqueName: \"kubernetes.io/projected/0cffa0e5-cf3c-450b-b289-d52fe242ad11-kube-api-access-m95rt\") pod \"openshift-apiserver-operator-796bbdcf4f-mvbfz\" (UID: \"0cffa0e5-cf3c-450b-b289-d52fe242ad11\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.562794 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt2ml\" (UniqueName: \"kubernetes.io/projected/b426c32f-f8fd-4019-af68-7d5febadde67-kube-api-access-jt2ml\") pod \"package-server-manager-789f6589d5-dgbsb\" (UID: \"b426c32f-f8fd-4019-af68-7d5febadde67\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.567960 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.573838 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2hstq\" (UID: \"7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.588781 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.592742 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fpwk\" (UniqueName: \"kubernetes.io/projected/fb91f88b-4fc5-464d-ac96-88b0828e67c0-kube-api-access-8fpwk\") pod \"router-default-5444994796-rwj5c\" (UID: \"fb91f88b-4fc5-464d-ac96-88b0828e67c0\") " pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.594954 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.596505 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6" Nov 30 06:48:47 crc kubenswrapper[4941]: E1130 06:48:47.597526 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:48.09748618 +0000 UTC m=+148.865657789 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.636515 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.637749 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pclv\" (UniqueName: \"kubernetes.io/projected/c4666640-fb87-46f0-8731-f9946f1f7470-kube-api-access-7pclv\") pod \"etcd-operator-b45778765-pgpzd\" (UID: \"c4666640-fb87-46f0-8731-f9946f1f7470\") " pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.638219 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rckqf\" (UniqueName: \"kubernetes.io/projected/ef794854-78b3-48a6-8fa0-3d2bd613ecd2-kube-api-access-rckqf\") pod \"multus-admission-controller-857f4d67dd-jfj9x\" (UID: \"ef794854-78b3-48a6-8fa0-3d2bd613ecd2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-jfj9x" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.645764 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.660360 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npdjz\" (UniqueName: \"kubernetes.io/projected/29524d66-9853-4dde-9ba5-9e124a055dd2-kube-api-access-npdjz\") pod \"service-ca-operator-777779d784-tvt4t\" (UID: \"29524d66-9853-4dde-9ba5-9e124a055dd2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.670545 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.685967 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wr9x\" (UniqueName: \"kubernetes.io/projected/3d7c5acd-fc0a-4c18-8b24-59227b10369f-kube-api-access-2wr9x\") pod \"kube-storage-version-migrator-operator-b67b599dd-4m85s\" (UID: \"3d7c5acd-fc0a-4c18-8b24-59227b10369f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.691050 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qc84p\" (UniqueName: \"kubernetes.io/projected/773d34d4-1723-42c4-8b83-43f629b630c2-kube-api-access-qc84p\") pod \"collect-profiles-29408085-x6l8j\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.696664 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.697593 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:47 crc kubenswrapper[4941]: E1130 06:48:47.697963 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:48.19795087 +0000 UTC m=+148.966122479 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.704709 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.726434 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.727682 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc4gd\" (UniqueName: \"kubernetes.io/projected/c4234d88-5b46-45ed-8a17-ac586277459f-kube-api-access-hc4gd\") pod \"machine-config-controller-84d6567774-qhxrz\" (UID: \"c4234d88-5b46-45ed-8a17-ac586277459f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.764756 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtfgm\" (UniqueName: \"kubernetes.io/projected/66ac15af-4bac-4898-a79d-3481df21bd0e-kube-api-access-gtfgm\") pod \"olm-operator-6b444d44fb-28g6q\" (UID: \"66ac15af-4bac-4898-a79d-3481df21bd0e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.764810 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7b8c\" (UniqueName: \"kubernetes.io/projected/f90130b9-9551-4823-9397-48b583729552-kube-api-access-w7b8c\") pod \"machine-config-operator-74547568cd-4rj4n\" (UID: \"f90130b9-9551-4823-9397-48b583729552\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.769288 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.784388 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-hcx8m" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.785055 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.786036 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c82qh\" (UniqueName: \"kubernetes.io/projected/63606771-b004-4903-a2a1-d5032a0fa94b-kube-api-access-c82qh\") pod \"control-plane-machine-set-operator-78cbb6b69f-ppcvh\" (UID: \"63606771-b004-4903-a2a1-d5032a0fa94b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.798659 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:47 crc kubenswrapper[4941]: E1130 06:48:47.799021 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:48.299005738 +0000 UTC m=+149.067177347 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.803836 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2faf43c0-49a3-41f7-a278-180d44c03689-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-4n8bt\" (UID: \"2faf43c0-49a3-41f7-a278-180d44c03689\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.840820 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd"] Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.864705 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.872235 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.878710 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.900568 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:47 crc kubenswrapper[4941]: E1130 06:48:47.900979 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:48.400966994 +0000 UTC m=+149.169138603 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.912057 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.919586 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-jfj9x" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.939611 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts"] Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.948631 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t"] Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.978744 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-46p7c"] Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.980417 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-vkq5m"] Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.982576 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.982694 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.983367 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-css84"] Nov 30 06:48:47 crc kubenswrapper[4941]: I1130 06:48:47.983986 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.001575 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:48 crc kubenswrapper[4941]: E1130 06:48:48.002191 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:48.502150867 +0000 UTC m=+149.270322476 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.003531 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-rw92q"] Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.016312 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm"] Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.017706 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh" Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.024573 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx"] Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.103398 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:48 crc kubenswrapper[4941]: E1130 06:48:48.103732 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:48.603716522 +0000 UTC m=+149.371888131 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.192687 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-57xxf"] Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.204182 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:48 crc kubenswrapper[4941]: E1130 06:48:48.204393 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:48.704362667 +0000 UTC m=+149.472534276 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.204609 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:48 crc kubenswrapper[4941]: E1130 06:48:48.205007 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:48.704986747 +0000 UTC m=+149.473158356 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.305888 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:48 crc kubenswrapper[4941]: E1130 06:48:48.306726 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:48.806703946 +0000 UTC m=+149.574875555 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.408136 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:48 crc kubenswrapper[4941]: E1130 06:48:48.408525 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:48.908512058 +0000 UTC m=+149.676683657 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.497812 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" event={"ID":"550aea27-f732-47cb-9c04-93edb6216ae0","Type":"ContainerStarted","Data":"e47d9420d5bffef7a79d9994e849672320d55565d489a858a2f87e99e7fdb1de"} Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.513171 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:48 crc kubenswrapper[4941]: E1130 06:48:48.513610 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:49.013590601 +0000 UTC m=+149.781762210 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.569413 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" event={"ID":"465a2899-647c-4144-8810-46a4a4e49909","Type":"ContainerStarted","Data":"37e842f711ce6c30f1a1c17781093e1b43cfc22cf0c1256812adea79cc03a634"} Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.585008 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"8a9b59ef428e2376a68a6464989e82ed03b60de04981687f1ab67e4d4b4552f0"} Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.586417 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.603146 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" event={"ID":"9118a944-3b9c-4f67-917a-899581233d1e","Type":"ContainerStarted","Data":"a4d516453d0da8b589707ffab25dc94d2445873b9f135f17fed2cd61b0b7dd70"} Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.603205 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" 
event={"ID":"9118a944-3b9c-4f67-917a-899581233d1e","Type":"ContainerStarted","Data":"eca247523372c05e7c3f1c36a8c2cf1a6907201178dbab7bf81d96aeb0511851"} Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.615312 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:48 crc kubenswrapper[4941]: E1130 06:48:48.617107 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:49.117086826 +0000 UTC m=+149.885258435 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.620677 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz"] Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.640793 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-vkq5m" event={"ID":"542c0442-1fb2-4e3e-ba06-a14526cf98ce","Type":"ContainerStarted","Data":"994a5d71e2ef8063f5b4cb17a1ceeec90650847f93964d78446d5594662f2153"} Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.649347 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-st8vq"] Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.658274 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4qn2t" event={"ID":"ab7371ad-5001-4223-84f1-c7d55ce3c5f3","Type":"ContainerStarted","Data":"84a0ccaf9ca03db749bd9401992ca912d2bf29bcf0ca5d78e48f031a401d8ddd"} Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.732612 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:48 crc kubenswrapper[4941]: E1130 06:48:48.733517 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:49.233492035 +0000 UTC m=+150.001663644 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.751505 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" event={"ID":"94a81839-4806-4112-8c30-fb70049d8296","Type":"ContainerStarted","Data":"97bc6000a766c376659a6b6ffbe29d3454cb54d6e0eadbc9ca05f0c6f7e04b58"} Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.786912 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.836653 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:48 crc kubenswrapper[4941]: E1130 06:48:48.838237 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:49.338215418 +0000 UTC m=+150.106387027 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.839724 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-46p7c" event={"ID":"de02b605-6b0f-476b-9df3-fd41e3a320a2","Type":"ContainerStarted","Data":"8ae884244dfa3b6f14dbac8a0293fcdd09b557626dc16cb1014738802614b60e"} Nov 30 06:48:48 crc kubenswrapper[4941]: W1130 06:48:48.852764 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0cffa0e5_cf3c_450b_b289_d52fe242ad11.slice/crio-21a6031cbec8ecfad538226cdd0bc3e6aecb20ca998b821c7fa7f9d28d25849e WatchSource:0}: Error finding container 21a6031cbec8ecfad538226cdd0bc3e6aecb20ca998b821c7fa7f9d28d25849e: Status 404 returned error can't find the container with id 21a6031cbec8ecfad538226cdd0bc3e6aecb20ca998b821c7fa7f9d28d25849e Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.906928 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rw92q" event={"ID":"59417585-2120-43f8-a16f-dc80dc6d1fd0","Type":"ContainerStarted","Data":"8b1dc59b81ffe07798f585f7f472911e510a22ba18163c12ed21582a560758c1"} Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.921267 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-pkrwj" podStartSLOduration=127.921242513 podStartE2EDuration="2m7.921242513s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:48.886196577 +0000 UTC m=+149.654368186" watchObservedRunningTime="2025-11-30 06:48:48.921242513 +0000 UTC m=+149.689414122" Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.937758 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:48 crc kubenswrapper[4941]: E1130 06:48:48.939580 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:49.439557385 +0000 UTC m=+150.207728994 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:48 crc kubenswrapper[4941]: I1130 06:48:48.990412 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f39950c5d116bc0163dbc21399e6af1fe9ae9bbb688e70eb273d07852e00616f"} Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.006257 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm" event={"ID":"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd","Type":"ContainerStarted","Data":"7b9e56b3d34ba2a688a33b6663cfb5f6ebe555b375c2b5ca9ceb53454c84daa6"} Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.014277 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-57xxf" event={"ID":"6b3b1c30-7677-4bff-85a6-80dd7a3548b0","Type":"ContainerStarted","Data":"60ee4c392bd7e475415074a0d339c2002d17e37afda07ef61aafbc895e57314d"} Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.019541 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t" event={"ID":"abdfc490-9871-4dd3-84b3-5f446f68f102","Type":"ContainerStarted","Data":"0b174940e8e1de10709a350ef02135d66d5944bf211e0f159afc7f31c34a8d2a"} Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.030905 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" event={"ID":"5ccfee3c-1083-4a51-a25b-4678f31d3a51","Type":"ContainerStarted","Data":"54d3129817b120dcdada567847544f49d11cccf08094a0861e091c57fb8d9212"} Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.038714 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-css84" event={"ID":"df9aa967-eec9-4ce5-9c64-edff3aedca4a","Type":"ContainerStarted","Data":"62503a9f9529cef2a6a5b2e490eaa9f2984b565209e1c0cecd8e6c48bda45d36"} Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.040260 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" podStartSLOduration=128.040248552 podStartE2EDuration="2m8.040248552s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:49.039236881 +0000 UTC m=+149.807408490" watchObservedRunningTime="2025-11-30 06:48:49.040248552 +0000 UTC m=+149.808420321" Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.041412 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:49 crc 
kubenswrapper[4941]: E1130 06:48:49.045204 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:49.545186256 +0000 UTC m=+150.313357865 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.045629 4941 generic.go:334] "Generic (PLEG): container finished" podID="f38e9faf-5fe2-49c8-8516-2b5f2766199e" containerID="2beef7d6ef8b66d87b0d4f973ae86ac80ee17357a39f78d0c99c6ad615f73eee" exitCode=0 Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.045806 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" event={"ID":"f38e9faf-5fe2-49c8-8516-2b5f2766199e","Type":"ContainerDied","Data":"2beef7d6ef8b66d87b0d4f973ae86ac80ee17357a39f78d0c99c6ad615f73eee"} Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.055109 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts" event={"ID":"5c335028-5b89-477e-9bf1-e76f8f249d1a","Type":"ContainerStarted","Data":"330ea519c22948d6efe0e74ab3fc247e4a9059c6d1f52e7d15b89c54a82e2601"} Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.060625 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-hcx8m" event={"ID":"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac","Type":"ContainerStarted","Data":"12058a42ee74c7e3aa1ad1e3897fefff1cde2bdbe5212bb3bc6613baf19bbe9a"} Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.078265 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"956f43cb20cc2bfef1cfc01974e4d47e8e49c7bba70f991b8ecec218cb3cfafa"} Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.091555 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-gjx4m" Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.091806 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.094034 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-zvvg4"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.143638 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:49 crc kubenswrapper[4941]: E1130 06:48:49.145986 4941 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:49.645934715 +0000 UTC m=+150.414106324 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.150584 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:49 crc kubenswrapper[4941]: E1130 06:48:49.151153 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:49.651139749 +0000 UTC m=+150.419311358 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.153194 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.165158 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.257287 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:49 crc kubenswrapper[4941]: E1130 06:48:49.259983 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:49.759951029 +0000 UTC m=+150.528122638 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.292250 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.329743 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.359767 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-gjx4m" podStartSLOduration=128.359745348 podStartE2EDuration="2m8.359745348s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:49.357450707 +0000 UTC m=+150.125622316" watchObservedRunningTime="2025-11-30 06:48:49.359745348 +0000 UTC m=+150.127916947" Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.361132 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:49 crc kubenswrapper[4941]: E1130 06:48:49.362567 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:49.862539516 +0000 UTC m=+150.630711125 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:49 crc kubenswrapper[4941]: W1130 06:48:49.431354 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf402eed_bc58_4a99_ab0c_bd75b072ebf9.slice/crio-e7da3476fc7b5dcb2f15d96fd28b114d48549ccadb9a3c569a64730392c8c198 WatchSource:0}: Error finding container e7da3476fc7b5dcb2f15d96fd28b114d48549ccadb9a3c569a64730392c8c198: Status 404 returned error can't find the container with id e7da3476fc7b5dcb2f15d96fd28b114d48549ccadb9a3c569a64730392c8c198 Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.462053 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:49 crc kubenswrapper[4941]: E1130 06:48:49.462425 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:49.962405236 +0000 UTC m=+150.730576845 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.566390 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:49 crc kubenswrapper[4941]: E1130 06:48:49.566808 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:50.066792869 +0000 UTC m=+150.834964468 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.652890 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" podStartSLOduration=128.652863579 podStartE2EDuration="2m8.652863579s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:49.644718225 +0000 UTC m=+150.412889824" watchObservedRunningTime="2025-11-30 06:48:49.652863579 +0000 UTC m=+150.421035188" Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.668591 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:49 crc kubenswrapper[4941]: E1130 06:48:49.669033 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:50.169008264 +0000 UTC m=+150.937179873 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.772044 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:49 crc kubenswrapper[4941]: E1130 06:48:49.772757 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:50.272737377 +0000 UTC m=+151.040908976 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.859016 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.859057 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.859073 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.859084 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.859097 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vw95c"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.859111 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.859124 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.859134 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-pgpzd"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.883637 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:49 crc kubenswrapper[4941]: E1130 06:48:49.883991 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:50.383969893 +0000 UTC m=+151.152141492 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:49 crc kubenswrapper[4941]: W1130 06:48:49.939772 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43fead9f_0d7a_4d82_8822_b4e83849d4ad.slice/crio-4e69c64019afdf1464a32e19bea8f0890eb9678ee1a4c5718ad754f33a9d00a4 WatchSource:0}: Error finding container 4e69c64019afdf1464a32e19bea8f0890eb9678ee1a4c5718ad754f33a9d00a4: Status 404 returned error can't find the container with id 4e69c64019afdf1464a32e19bea8f0890eb9678ee1a4c5718ad754f33a9d00a4 Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.982465 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s"] Nov 30 06:48:49 crc kubenswrapper[4941]: I1130 06:48:49.985601 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:49 crc kubenswrapper[4941]: E1130 06:48:49.986110 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:50.486092134 +0000 UTC m=+151.254263743 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.028492 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-mt5dk" podStartSLOduration=129.028472389 podStartE2EDuration="2m9.028472389s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.026821058 +0000 UTC m=+150.794992667" watchObservedRunningTime="2025-11-30 06:48:50.028472389 +0000 UTC m=+150.796643998" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.084527 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q"] Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.086296 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:50 crc kubenswrapper[4941]: E1130 06:48:50.086417 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:50.58639061 +0000 UTC m=+151.354562209 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.101855 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:50 crc kubenswrapper[4941]: E1130 06:48:50.102435 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:50.60241864 +0000 UTC m=+151.370590249 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.128141 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-4qn2t" podStartSLOduration=129.128121584 podStartE2EDuration="2m9.128121584s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.123956143 +0000 UTC m=+150.892127752" watchObservedRunningTime="2025-11-30 06:48:50.128121584 +0000 UTC m=+150.896293193" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.137548 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh"] Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.207578 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:50 crc kubenswrapper[4941]: E1130 06:48:50.207818 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:50.707802434 +0000 UTC m=+151.475974043 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.257295 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" podStartSLOduration=129.25727241 podStartE2EDuration="2m9.25727241s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.188536792 +0000 UTC m=+150.956708401" watchObservedRunningTime="2025-11-30 06:48:50.25727241 +0000 UTC m=+151.025444019" Nov 30 06:48:50 crc kubenswrapper[4941]: W1130 06:48:50.257551 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66ac15af_4bac_4898_a79d_3481df21bd0e.slice/crio-9896c8797734a8ca9fea2d86ec3f5eae98f3b27d2e9554e5c3a697028a76213d WatchSource:0}: Error finding container 9896c8797734a8ca9fea2d86ec3f5eae98f3b27d2e9554e5c3a697028a76213d: Status 404 returned error can't find the container with id 9896c8797734a8ca9fea2d86ec3f5eae98f3b27d2e9554e5c3a697028a76213d Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.280572 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-jfj9x"] Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.308306 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" event={"ID":"c4234d88-5b46-45ed-8a17-ac586277459f","Type":"ContainerStarted","Data":"38f2c760e07dcfef6d1c2961fbaf64aee699f0ff871acbdf9cdd3770f1d62694"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.310915 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:50 crc kubenswrapper[4941]: E1130 06:48:50.311248 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:50.811234817 +0000 UTC m=+151.579406426 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.334543 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t" event={"ID":"abdfc490-9871-4dd3-84b3-5f446f68f102","Type":"ContainerStarted","Data":"03c79d954b5217a58ef7756fbe78c7a5ff59feeb454040aef0aff34d7a76e5dd"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.370786 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4qn2t" event={"ID":"ab7371ad-5001-4223-84f1-c7d55ce3c5f3","Type":"ContainerStarted","Data":"4c133a800a5f5a2720f837f6faad219fb452ff2db85e3227fc671e1e3c865b0d"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.387945 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-57xxf" event={"ID":"6b3b1c30-7677-4bff-85a6-80dd7a3548b0","Type":"ContainerStarted","Data":"c4ee3533ec35d07c49f41f3ca6ebc267feee3210a62e09829e625314a86bf19c"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.413402 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:50 crc kubenswrapper[4941]: E1130 06:48:50.415090 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:50.915070402 +0000 UTC m=+151.683242001 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.423982 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" event={"ID":"773d34d4-1723-42c4-8b83-43f629b630c2","Type":"ContainerStarted","Data":"39e4a45189d9d9ce72e70cbefe3545414bed94e52da2d5c350e73f030498a1bb"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.437199 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz" event={"ID":"0cffa0e5-cf3c-450b-b289-d52fe242ad11","Type":"ContainerStarted","Data":"f2475ceda07e9b8e01fa2adf6b264b6f873f51ba60e544f029221f24a06712d9"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.437827 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz" event={"ID":"0cffa0e5-cf3c-450b-b289-d52fe242ad11","Type":"ContainerStarted","Data":"21a6031cbec8ecfad538226cdd0bc3e6aecb20ca998b821c7fa7f9d28d25849e"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.446685 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6" event={"ID":"bf402eed-bc58-4a99-ab0c-bd75b072ebf9","Type":"ContainerStarted","Data":"e7da3476fc7b5dcb2f15d96fd28b114d48549ccadb9a3c569a64730392c8c198"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.514575 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:50 crc kubenswrapper[4941]: E1130 06:48:50.514952 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:51.014933463 +0000 UTC m=+151.783105082 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.527563 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4" event={"ID":"cc097706-3829-4b7b-9047-97a52667c825","Type":"ContainerStarted","Data":"75d5f04482eb5d8e44e1f617b0ae48d4fd498c73b8b8d4d0a4776047c7ad618b"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.550195 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-vkq5m" event={"ID":"542c0442-1fb2-4e3e-ba06-a14526cf98ce","Type":"ContainerStarted","Data":"5b2c58ef237588eafaf995ed82920515d48b865b8e2bbb01e6599009387d0101"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.567693 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-vkq5m" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.567956 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mvbfz" podStartSLOduration=129.5679356 podStartE2EDuration="2m9.5679356s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.567132715 +0000 UTC m=+151.335304344" watchObservedRunningTime="2025-11-30 06:48:50.5679356 +0000 UTC m=+151.336107209" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.605611 4941 patch_prober.go:28] interesting pod/downloads-7954f5f757-vkq5m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.605755 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vkq5m" podUID="542c0442-1fb2-4e3e-ba06-a14526cf98ce" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.612044 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-57xxf" podStartSLOduration=6.612027898 podStartE2EDuration="6.612027898s" podCreationTimestamp="2025-11-30 06:48:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.60952594 +0000 UTC m=+151.377697549" watchObservedRunningTime="2025-11-30 06:48:50.612027898 +0000 UTC m=+151.380199507" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.615239 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " 
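The repeated MountVolume.MountDevice and UnmountVolume.TearDown failures for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 in this window all share one root cause: the kubevirt.io.hostpath-provisioner CSI node plugin has not yet registered with the kubelet, so every mount and teardown attempt fails with "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" and is requeued. The plugin pod (hostpath-provisioner/csi-hostpathplugin-46p7c) only reports ContainerStarted later in this log, after which the image-registry PVC can attach. Once a node plugin registers, the kubelet records it in the node's CSINode object; a minimal diagnostic sketch using client-go, assuming in-cluster credentials (the driver and node names are taken from the log, the rest is illustrative):

```go
// check-csi-registration: reports whether the CSI driver named in the
// kubelet errors above has completed node registration on node "crc".
package main

import (
	"context"
	"fmt"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	const driverName = "kubevirt.io.hostpath-provisioner" // from the MountDevice errors
	const nodeName = "crc"                                // host emitting this kubelet log

	cfg, err := rest.InClusterConfig() // assumes the check runs inside the cluster
	if err != nil {
		log.Fatalf("load in-cluster config: %v", err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatalf("build clientset: %v", err)
	}

	// The kubelet adds an entry to the node's CSINode object once the
	// driver's node plugin registers over the plugin-registration socket.
	csiNode, err := cs.StorageV1().CSINodes().Get(context.TODO(), nodeName, metav1.GetOptions{})
	if err != nil {
		log.Fatalf("get CSINode %s: %v", nodeName, err)
	}
	for _, d := range csiNode.Spec.Drivers {
		if d.Name == driverName {
			fmt.Printf("driver %s is registered on %s (node ID %q)\n", d.Name, nodeName, d.NodeID)
			return
		}
	}
	fmt.Printf("driver %s is NOT registered on %s; mounts will keep failing\n", driverName, nodeName)
}
```

Until the driver appears in that list, the volume reconciler can only keep retrying, which is exactly the mount/unmount ping-pong recorded above.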
Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.616470 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm" event={"ID":"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd","Type":"ContainerStarted","Data":"bab9e9fcd5471fcc62de6674eff5a7f2b9a5b96531b9f8cc1f52f7363d40f210"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.616503 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm" event={"ID":"0d47c312-eb2e-4f83-8d9d-bd30bfd989dd","Type":"ContainerStarted","Data":"e61f2b9f2e650dfbfc0eb022eecc297bed2f94129ab548088e1c2c5c42566a77"} Nov 30 06:48:50 crc kubenswrapper[4941]: E1130 06:48:50.618035 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:51.118017926 +0000 UTC m=+151.886189535 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.647435 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rw92q" event={"ID":"59417585-2120-43f8-a16f-dc80dc6d1fd0","Type":"ContainerStarted","Data":"764f8de06bdc44041129052ceaede6d8aaba69158729427d9b4ccb90d4193bc2"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.694028 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-frzl6" event={"ID":"179be37e-b722-4f7b-ab6a-88fb1407c2e7","Type":"ContainerStarted","Data":"ec80ac0476df3bee1c610032565feb8e1b55e81f3615573f6c5c440ef4cdf8f8"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.694623 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4" podStartSLOduration=128.694610049 podStartE2EDuration="2m8.694610049s" podCreationTimestamp="2025-11-30 06:46:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.660394889 +0000 UTC m=+151.428566498" watchObservedRunningTime="2025-11-30 06:48:50.694610049 +0000 UTC m=+151.462781658" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.698979 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-hcx8m" podStartSLOduration=6.698964996 podStartE2EDuration="6.698964996s" podCreationTimestamp="2025-11-30 06:48:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.695245419 +0000 UTC m=+151.463417028" watchObservedRunningTime="2025-11-30 06:48:50.698964996 +0000 UTC m=+151.467136605" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.720580 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts" event={"ID":"5c335028-5b89-477e-9bf1-e76f8f249d1a","Type":"ContainerStarted","Data":"d56a2aa2010bf54f5b1ac695eca5c2f83cc6eead21c3f01087e1fd8d76595b64"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.721188 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:50 crc kubenswrapper[4941]: E1130 06:48:50.723605 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:51.223592425 +0000 UTC m=+151.991764024 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.729511 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-vkq5m" podStartSLOduration=129.729477009 podStartE2EDuration="2m9.729477009s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.718985261 +0000 UTC m=+151.487156870" watchObservedRunningTime="2025-11-30 06:48:50.729477009 +0000 UTC m=+151.497648618" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.738368 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" event={"ID":"29524d66-9853-4dde-9ba5-9e124a055dd2","Type":"ContainerStarted","Data":"3b4e748d786754aa22f51940530c1f10b30e8e3567c1e22fe03d61f305cdf552"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.739880 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq" event={"ID":"7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73","Type":"ContainerStarted","Data":"7de5bf6edd6f637889ccfaf29d74553c460201fc7d19fe5e3939e3cb6858567c"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.752683 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jxshm" podStartSLOduration=129.752658404 podStartE2EDuration="2m9.752658404s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.75000331 +0000 UTC m=+151.518174909" watchObservedRunningTime="2025-11-30 06:48:50.752658404 +0000 UTC m=+151.520830013" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.757264 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" event={"ID":"43fead9f-0d7a-4d82-8822-b4e83849d4ad","Type":"ContainerStarted","Data":"4e69c64019afdf1464a32e19bea8f0890eb9678ee1a4c5718ad754f33a9d00a4"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.766606 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" event={"ID":"b426c32f-f8fd-4019-af68-7d5febadde67","Type":"ContainerStarted","Data":"410593257a16c4bd5dccd36371f5e975f2c49a340cd8def591761bc48a05efc8"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.767591 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2" event={"ID":"11220b25-0e89-43b7-95ef-4385c047753e","Type":"ContainerStarted","Data":"854aa562d62d921cbbec2cca87539ec1a111aa891367a6d3554d147321499092"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.769745 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.792534 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-rwj5c" event={"ID":"fb91f88b-4fc5-464d-ac96-88b0828e67c0","Type":"ContainerStarted","Data":"f5edb6fdc2b8656d4fc59c0f92179de8b62663375e49605be36388e765ddd326"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.792586 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-rwj5c" event={"ID":"fb91f88b-4fc5-464d-ac96-88b0828e67c0","Type":"ContainerStarted","Data":"fa06119f036ba54056a8796db0ac7e5e1dede12afaaf9b091981a85b36a0d5ee"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.821901 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:50 crc kubenswrapper[4941]: E1130 06:48:50.822437 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:51.322419424 +0000 UTC m=+152.090591033 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.823051 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-frzl6" podStartSLOduration=129.823039563 podStartE2EDuration="2m9.823039563s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.821143684 +0000 UTC m=+151.589315293" watchObservedRunningTime="2025-11-30 06:48:50.823039563 +0000 UTC m=+151.591211172" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.842393 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-st8vq" event={"ID":"e6486e25-f042-42ac-a9e4-8a399bf9b414","Type":"ContainerStarted","Data":"be7efa3b44a68cd68961299598589eb8f22b9aec7672f8eab2d718393f8b67b9"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.844174 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" event={"ID":"0cc75551-8b40-4ae7-a917-7aaa202313c4","Type":"ContainerStarted","Data":"36c4ab33ae6856a8ce5d5ff32b850fa254f638fd401bd64fe1ad98ff7a192d39"} Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.870771 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq" podStartSLOduration=129.870742724 podStartE2EDuration="2m9.870742724s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.870497616 +0000 UTC m=+151.638669225" watchObservedRunningTime="2025-11-30 06:48:50.870742724 +0000 UTC m=+151.638914333" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.877804 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.877858 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.878697 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.883600 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-526zm" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.901849 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2" podStartSLOduration=129.901829915 podStartE2EDuration="2m9.901829915s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.899074419 +0000 UTC m=+151.667246028" watchObservedRunningTime="2025-11-30 06:48:50.901829915 +0000 UTC m=+151.670001524" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.903549 4941 patch_prober.go:28] interesting pod/router-default-5444994796-rwj5c container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 30 06:48:50 crc kubenswrapper[4941]: [-]has-synced failed: reason withheld Nov 30 06:48:50 crc kubenswrapper[4941]: [+]process-running ok Nov 30 06:48:50 crc kubenswrapper[4941]: healthz check failed Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.903619 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rwj5c" podUID="fb91f88b-4fc5-464d-ac96-88b0828e67c0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.926018 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:50 crc kubenswrapper[4941]: E1130 06:48:50.927809 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:51.427787237 +0000 UTC m=+152.195958846 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.941458 4941 patch_prober.go:28] interesting pod/apiserver-76f77b778f-frzl6 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Nov 30 06:48:50 crc kubenswrapper[4941]: [+]log ok Nov 30 06:48:50 crc kubenswrapper[4941]: [+]etcd ok Nov 30 06:48:50 crc kubenswrapper[4941]: [+]poststarthook/start-apiserver-admission-initializer ok Nov 30 06:48:50 crc kubenswrapper[4941]: [+]poststarthook/generic-apiserver-start-informers ok Nov 30 06:48:50 crc kubenswrapper[4941]: [+]poststarthook/max-in-flight-filter ok Nov 30 06:48:50 crc kubenswrapper[4941]: [+]poststarthook/storage-object-count-tracker-hook ok Nov 30 06:48:50 crc kubenswrapper[4941]: [+]poststarthook/image.openshift.io-apiserver-caches ok Nov 30 06:48:50 crc kubenswrapper[4941]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Nov 30 06:48:50 crc kubenswrapper[4941]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Nov 30 06:48:50 crc kubenswrapper[4941]: [+]poststarthook/project.openshift.io-projectcache ok Nov 30 06:48:50 crc kubenswrapper[4941]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Nov 30 06:48:50 crc kubenswrapper[4941]: [+]poststarthook/openshift.io-startinformers ok Nov 30 06:48:50 crc kubenswrapper[4941]: [+]poststarthook/openshift.io-restmapperupdater ok Nov 30 06:48:50 crc kubenswrapper[4941]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Nov 30 06:48:50 crc kubenswrapper[4941]: livez check failed Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.941547 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-frzl6" podUID="179be37e-b722-4f7b-ab6a-88fb1407c2e7" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.949063 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" podStartSLOduration=128.949042161 podStartE2EDuration="2m8.949042161s" podCreationTimestamp="2025-11-30 06:46:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.948194545 +0000 UTC m=+151.716366154" watchObservedRunningTime="2025-11-30 06:48:50.949042161 +0000 UTC m=+151.717213770" Nov 30 06:48:50 crc kubenswrapper[4941]: I1130 06:48:50.986866 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-rwj5c" podStartSLOduration=129.986843963 podStartE2EDuration="2m9.986843963s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:50.983923382 +0000 UTC m=+151.752094991" watchObservedRunningTime="2025-11-30 06:48:50.986843963 +0000 UTC m=+151.755015572" Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.026850 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:51 crc kubenswrapper[4941]: E1130 06:48:51.028259 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:51.528239537 +0000 UTC m=+152.296411146 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.032756 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9bcts" podStartSLOduration=130.032730567 podStartE2EDuration="2m10.032730567s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:51.031091076 +0000 UTC m=+151.799262675" watchObservedRunningTime="2025-11-30 06:48:51.032730567 +0000 UTC m=+151.800902176" Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.142216 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:51 crc kubenswrapper[4941]: E1130 06:48:51.143117 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:51.643098456 +0000 UTC m=+152.411270065 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.244456 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:51 crc kubenswrapper[4941]: E1130 06:48:51.244894 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:51.744872578 +0000 UTC m=+152.513044187 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.273089 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.348613 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:51 crc kubenswrapper[4941]: E1130 06:48:51.349334 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:51.849307281 +0000 UTC m=+152.617478880 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.451271 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:51 crc kubenswrapper[4941]: E1130 06:48:51.451643 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:51.951595529 +0000 UTC m=+152.719767138 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.553157 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:51 crc kubenswrapper[4941]: E1130 06:48:51.554307 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:52.054291608 +0000 UTC m=+152.822463217 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.617360 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.658265 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:51 crc kubenswrapper[4941]: E1130 06:48:51.658375 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:52.158355341 +0000 UTC m=+152.926526950 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.658873 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:51 crc kubenswrapper[4941]: E1130 06:48:51.660791 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:52.160775106 +0000 UTC m=+152.928946715 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.759983 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:51 crc kubenswrapper[4941]: E1130 06:48:51.760301 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:52.260278577 +0000 UTC m=+153.028450186 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.863140 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:51 crc kubenswrapper[4941]: E1130 06:48:51.863575 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:52.363554954 +0000 UTC m=+153.131726563 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.883185 4941 patch_prober.go:28] interesting pod/router-default-5444994796-rwj5c container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 30 06:48:51 crc kubenswrapper[4941]: [-]has-synced failed: reason withheld Nov 30 06:48:51 crc kubenswrapper[4941]: [+]process-running ok Nov 30 06:48:51 crc kubenswrapper[4941]: healthz check failed Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.883250 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rwj5c" podUID="fb91f88b-4fc5-464d-ac96-88b0828e67c0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.883865 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" event={"ID":"c4666640-fb87-46f0-8731-f9946f1f7470","Type":"ContainerStarted","Data":"9f5ed490e8ff2817e4c8873433369b8e5613fbb1296a2b4cc765f411f2b2003b"} Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.883915 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" event={"ID":"c4666640-fb87-46f0-8731-f9946f1f7470","Type":"ContainerStarted","Data":"bc550d06b29c5f264cc1a432e0f9a1dbf22971e7ea2b0850efff882ac3aeb5d6"} Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.896156 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-46p7c" event={"ID":"de02b605-6b0f-476b-9df3-fd41e3a320a2","Type":"ContainerStarted","Data":"b941b4a9571f8b3a4c60b0f7e9c682fb11cff1eeb15b88f17634436e21c068e5"} Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.939517 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-st8vq" event={"ID":"e6486e25-f042-42ac-a9e4-8a399bf9b414","Type":"ContainerStarted","Data":"06e5f3a90ef48e0e01b1b96dde954c566fefa03b2a84381ccb513c5080da1738"} Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.963550 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2hstq" event={"ID":"7d4e3dcb-7c2e-46e0-b8eb-7ad4bfd98c73","Type":"ContainerStarted","Data":"fa92df65784896430a5e525bdca39352e9da300edb692cf87bb00c96ec653e12"} Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.965019 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:51 crc kubenswrapper[4941]: E1130 06:48:51.966767 4941 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:52.466732059 +0000 UTC m=+153.234903668 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.992706 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fmnw2" event={"ID":"11220b25-0e89-43b7-95ef-4385c047753e","Type":"ContainerStarted","Data":"1b5d0235bd4ba50f5b6175d9b2451f5e8a379f78f081da5f8c4befb3dbfdae31"} Nov 30 06:48:51 crc kubenswrapper[4941]: I1130 06:48:51.995943 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-pgpzd" podStartSLOduration=130.995930731 podStartE2EDuration="2m10.995930731s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:51.993991411 +0000 UTC m=+152.762163020" watchObservedRunningTime="2025-11-30 06:48:51.995930731 +0000 UTC m=+152.764102340" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.008552 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" event={"ID":"43fead9f-0d7a-4d82-8822-b4e83849d4ad","Type":"ContainerStarted","Data":"3c93e3a7fa54f0faf07ea224bc60eb39cfee08efabce356fb5fa279efcfce3cc"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.009187 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.016290 4941 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-vw95c container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.016362 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" podUID="43fead9f-0d7a-4d82-8822-b4e83849d4ad" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.024934 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh" event={"ID":"63606771-b004-4903-a2a1-d5032a0fa94b","Type":"ContainerStarted","Data":"43d31fc44564c4f52e310e0ac95b3fb5091384da2d3ba384695525da44875973"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.024976 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh" 
event={"ID":"63606771-b004-4903-a2a1-d5032a0fa94b","Type":"ContainerStarted","Data":"202981c0ec7e1919670e99039851b8ebf0fb0c0604b37fb2c155edaa060cf994"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.027133 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jfj9x" event={"ID":"ef794854-78b3-48a6-8fa0-3d2bd613ecd2","Type":"ContainerStarted","Data":"ddf5bf6f83435a727ace797da2c0c3fdb7d9b3da09c56c30a033145461dc41f7"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.027155 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jfj9x" event={"ID":"ef794854-78b3-48a6-8fa0-3d2bd613ecd2","Type":"ContainerStarted","Data":"134cf9a0fbc17a21a5952db2894bc87850fef991f83bd2ba4b35ce693b5c3b8b"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.028502 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" event={"ID":"5ccfee3c-1083-4a51-a25b-4678f31d3a51","Type":"ContainerStarted","Data":"f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.035892 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" event={"ID":"0cc75551-8b40-4ae7-a917-7aaa202313c4","Type":"ContainerStarted","Data":"6df48feb38cd7784fb279e5a3e5900656e4ef605ab8f8339dcdda0511fb0be53"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.036790 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.040438 4941 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-g9klz container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.040489 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" podUID="0cc75551-8b40-4ae7-a917-7aaa202313c4" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.060392 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t" event={"ID":"abdfc490-9871-4dd3-84b3-5f446f68f102","Type":"ContainerStarted","Data":"72fcbf82d2455d0a9521052dd233d81fe3bddd1a4ae04efed16068592c297d52"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.066243 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:52 crc kubenswrapper[4941]: E1130 06:48:52.067518 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" 
failed. No retries permitted until 2025-11-30 06:48:52.567503119 +0000 UTC m=+153.335674728 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.087832 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-frzl6" event={"ID":"179be37e-b722-4f7b-ab6a-88fb1407c2e7","Type":"ContainerStarted","Data":"0518ae08a16e92e79092723d3625255bf4cf4f2150d05ab5cbf5dffa707653cc"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.112653 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" podStartSLOduration=131.112632869 podStartE2EDuration="2m11.112632869s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.07651406 +0000 UTC m=+152.844685669" watchObservedRunningTime="2025-11-30 06:48:52.112632869 +0000 UTC m=+152.880804478" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.147524 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" podStartSLOduration=131.147503088 podStartE2EDuration="2m11.147503088s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.145841997 +0000 UTC m=+152.914013606" watchObservedRunningTime="2025-11-30 06:48:52.147503088 +0000 UTC m=+152.915674697" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.148386 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ppcvh" podStartSLOduration=131.148380486 podStartE2EDuration="2m11.148380486s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.113687492 +0000 UTC m=+152.881859101" watchObservedRunningTime="2025-11-30 06:48:52.148380486 +0000 UTC m=+152.916552095" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.170900 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:52 crc kubenswrapper[4941]: E1130 06:48:52.171240 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:52.67120522 +0000 UTC m=+153.439376829 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.171570 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:52 crc kubenswrapper[4941]: E1130 06:48:52.173717 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:52.673702778 +0000 UTC m=+153.441874387 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.189931 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-hzh6t" podStartSLOduration=131.189904644 podStartE2EDuration="2m11.189904644s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.188871562 +0000 UTC m=+152.957043171" watchObservedRunningTime="2025-11-30 06:48:52.189904644 +0000 UTC m=+152.958076253" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.191794 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" event={"ID":"773d34d4-1723-42c4-8b83-43f629b630c2","Type":"ContainerStarted","Data":"cb07af51b3105ae67f0fe232c46fd6912a357c344881a250b64edafdf6ae76ab"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.229199 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rw92q" event={"ID":"59417585-2120-43f8-a16f-dc80dc6d1fd0","Type":"ContainerStarted","Data":"bce205ce66d48c1019fb9c2f2d0fd25bef72a0905378e565c7dae73384db7d98"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.238542 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" podStartSLOduration=131.238521614 podStartE2EDuration="2m11.238521614s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.235891792 +0000 UTC m=+153.004063401" 
watchObservedRunningTime="2025-11-30 06:48:52.238521614 +0000 UTC m=+153.006693213" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.259574 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rw92q" podStartSLOduration=131.259555721 podStartE2EDuration="2m11.259555721s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.257556589 +0000 UTC m=+153.025728198" watchObservedRunningTime="2025-11-30 06:48:52.259555721 +0000 UTC m=+153.027727330" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.276847 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:52 crc kubenswrapper[4941]: E1130 06:48:52.277408 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:52.777387739 +0000 UTC m=+153.545559348 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.285063 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" event={"ID":"c4234d88-5b46-45ed-8a17-ac586277459f","Type":"ContainerStarted","Data":"6d54cd5fd28838d900e9d12e467e8334416656e939ec7622875e6849aa16fc48"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.302597 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-zvvg4" event={"ID":"cc097706-3829-4b7b-9047-97a52667c825","Type":"ContainerStarted","Data":"406b777fd941146ec434b19b253401fb04fd8756650877c1b3e2fc8a3608cfe5"} Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.331472 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" podStartSLOduration=131.331441738 podStartE2EDuration="2m11.331441738s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.317016007 +0000 UTC m=+153.085187616" watchObservedRunningTime="2025-11-30 06:48:52.331441738 +0000 UTC m=+153.099613337" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.371753 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-hcx8m" event={"ID":"ad1e76c8-6a3f-4012-ab00-ebaba2e536ac","Type":"ContainerStarted","Data":"5f20baca39b428d6dbaf8fb32e3fd4de636bb7dc559a707e4c5215fac5cc15b4"} 
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.408061 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:52 crc kubenswrapper[4941]: E1130 06:48:52.410093 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:52.910063306 +0000 UTC m=+153.678234915 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.441528 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" event={"ID":"f38e9faf-5fe2-49c8-8516-2b5f2766199e","Type":"ContainerStarted","Data":"c80431d68595df6c87b54c0aa394ab49c49226ab98183567cef168437bb7fc35"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.469058 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" podStartSLOduration=131.469034158 podStartE2EDuration="2m11.469034158s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.465428906 +0000 UTC m=+153.233600515" watchObservedRunningTime="2025-11-30 06:48:52.469034158 +0000 UTC m=+153.237205757"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.477845 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6" event={"ID":"bf402eed-bc58-4a99-ab0c-bd75b072ebf9","Type":"ContainerStarted","Data":"0306285624bdc78f795d9ec3cf0aed9a0eb29d10ec589cadf1f604014e1eeeca"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.480244 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" event={"ID":"550aea27-f732-47cb-9c04-93edb6216ae0","Type":"ContainerStarted","Data":"ba86128f7fa8fc285e74d0fe0c39a96bc1b4cddf496bf0e3807329cb4b76f598"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.480796 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.511179 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.511249 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" event={"ID":"f90130b9-9551-4823-9397-48b583729552","Type":"ContainerStarted","Data":"7720b17160fb47f924c358befc9e266e1a9f56a4cb4145bd44c6b413ef868397"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.511303 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" event={"ID":"f90130b9-9551-4823-9397-48b583729552","Type":"ContainerStarted","Data":"c9e02292866157b32061bd0dd8393a87dcc6dc8d6c44b8c996eeb87063a4b07c"}
Nov 30 06:48:52 crc kubenswrapper[4941]: E1130 06:48:52.511765 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:53.011741853 +0000 UTC m=+153.779913452 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.532077 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rxb6" podStartSLOduration=131.532058258 podStartE2EDuration="2m11.532058258s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.530008784 +0000 UTC m=+153.298180403" watchObservedRunningTime="2025-11-30 06:48:52.532058258 +0000 UTC m=+153.300229867"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.539299 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" event={"ID":"66ac15af-4bac-4898-a79d-3481df21bd0e","Type":"ContainerStarted","Data":"361bf14f847c186f38177c6da0685cbc69a790e003a6ff6dd53d6063bc85180e"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.539371 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.539383 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" event={"ID":"66ac15af-4bac-4898-a79d-3481df21bd0e","Type":"ContainerStarted","Data":"9896c8797734a8ca9fea2d86ec3f5eae98f3b27d2e9554e5c3a697028a76213d"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.550730 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" event={"ID":"29524d66-9853-4dde-9ba5-9e124a055dd2","Type":"ContainerStarted","Data":"141e9569a2d8159df0dd6aaf2fcb845647a705054c43476913723014123bafab"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.566694 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" event={"ID":"b426c32f-f8fd-4019-af68-7d5febadde67","Type":"ContainerStarted","Data":"b94c8e56a867aa6f2bc030a11e5c4edf4c42894e43fdb06b92eadb1d76540915"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.567168 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" event={"ID":"b426c32f-f8fd-4019-af68-7d5febadde67","Type":"ContainerStarted","Data":"03ac6f312425251ec7eef09ca10abb094cf1ade5703f8b2c6afb315ee3731e30"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.567825 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.569691 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.593541 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-css84" event={"ID":"df9aa967-eec9-4ce5-9c64-edff3aedca4a","Type":"ContainerStarted","Data":"7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.611050 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx" podStartSLOduration=131.611034617 podStartE2EDuration="2m11.611034617s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.609443756 +0000 UTC m=+153.377615365" watchObservedRunningTime="2025-11-30 06:48:52.611034617 +0000 UTC m=+153.379206226"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.611613 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" podStartSLOduration=131.611609175 podStartE2EDuration="2m11.611609175s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.582907247 +0000 UTC m=+153.351078856" watchObservedRunningTime="2025-11-30 06:48:52.611609175 +0000 UTC m=+153.379780784"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.614049 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:52 crc kubenswrapper[4941]: E1130 06:48:52.614452 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:53.114438313 +0000 UTC m=+153.882609922 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.625644 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" event={"ID":"9118a944-3b9c-4f67-917a-899581233d1e","Type":"ContainerStarted","Data":"e487d198435a974bbe10237b0764f801a95689f416ee9143c0b6690465c2df93"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.642485 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s" event={"ID":"3d7c5acd-fc0a-4c18-8b24-59227b10369f","Type":"ContainerStarted","Data":"190a12a3d2efd4700306b4b1ce6e622bdacdb6e832d112fe749c476131b22f76"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.642539 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s" event={"ID":"3d7c5acd-fc0a-4c18-8b24-59227b10369f","Type":"ContainerStarted","Data":"a86d8d7774fc1ce81193b545787cc839ee3b58509212bb355e9da6e222c24757"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.643401 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-css84" podStartSLOduration=131.643376787 podStartE2EDuration="2m11.643376787s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.641031694 +0000 UTC m=+153.409203303" watchObservedRunningTime="2025-11-30 06:48:52.643376787 +0000 UTC m=+153.411548396"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.662412 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" event={"ID":"2faf43c0-49a3-41f7-a278-180d44c03689","Type":"ContainerStarted","Data":"5bb5168f2f78cabb64d975d8041ba13c88c72de16b560bf1ca638bc6ba8d8197"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.662654 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" event={"ID":"2faf43c0-49a3-41f7-a278-180d44c03689","Type":"ContainerStarted","Data":"db378d7c5e2fc8432f8c7b38f19058ab56f9c9e4d29a254c80433df8fdf84f38"}
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.665099 4941 patch_prober.go:28] interesting pod/downloads-7954f5f757-vkq5m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body=
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.665163 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vkq5m" podUID="542c0442-1fb2-4e3e-ba06-a14526cf98ce" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.688746 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-28g6q" podStartSLOduration=131.688725905 podStartE2EDuration="2m11.688725905s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.664491327 +0000 UTC m=+153.432662936" watchObservedRunningTime="2025-11-30 06:48:52.688725905 +0000 UTC m=+153.456897514"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.719707 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 30 06:48:52 crc kubenswrapper[4941]: E1130 06:48:52.721127 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:53.221105926 +0000 UTC m=+153.989277535 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.726010 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" podStartSLOduration=131.72599181 podStartE2EDuration="2m11.72599181s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.725213465 +0000 UTC m=+153.493385074" watchObservedRunningTime="2025-11-30 06:48:52.72599181 +0000 UTC m=+153.494163419"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.731829 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tvt4t" podStartSLOduration=130.731810332 podStartE2EDuration="2m10.731810332s" podCreationTimestamp="2025-11-30 06:46:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.692761121 +0000 UTC m=+153.460932730" watchObservedRunningTime="2025-11-30 06:48:52.731810332 +0000 UTC m=+153.499981941"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.785699 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-cpxtg" podStartSLOduration=131.785674334 podStartE2EDuration="2m11.785674334s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.759734684 +0000 UTC m=+153.527906293" watchObservedRunningTime="2025-11-30 06:48:52.785674334 +0000 UTC m=+153.553845943"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.819678 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sk4xx"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.820442 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-4n8bt" podStartSLOduration=131.820420281 podStartE2EDuration="2m11.820420281s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.787849773 +0000 UTC m=+153.556021382" watchObservedRunningTime="2025-11-30 06:48:52.820420281 +0000 UTC m=+153.588591890"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.831232 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr"
Nov 30 06:48:52 crc kubenswrapper[4941]: E1130 06:48:52.831837 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:53.331822467 +0000 UTC m=+154.099994076 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.855060 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4m85s" podStartSLOduration=131.855041743 podStartE2EDuration="2m11.855041743s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:52.819824412 +0000 UTC m=+153.587996021" watchObservedRunningTime="2025-11-30 06:48:52.855041743 +0000 UTC m=+153.623213352"
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.883259 4941 patch_prober.go:28] interesting pod/router-default-5444994796-rwj5c container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 30 06:48:52 crc kubenswrapper[4941]: [-]has-synced failed: reason withheld
Nov 30 06:48:52 crc kubenswrapper[4941]: [+]process-running ok
Nov 30 06:48:52 crc kubenswrapper[4941]: healthz check failed
Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.883351 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rwj5c" podUID="fb91f88b-4fc5-464d-ac96-88b0828e67c0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 30 06:48:52 crc kubenswrapper[4941]: I1130 06:48:52.932201 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:52 crc kubenswrapper[4941]: E1130 06:48:52.932592 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:53.432569936 +0000 UTC m=+154.200741535 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.033290 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.033688 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:53.533673846 +0000 UTC m=+154.301845455 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.134057 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.134406 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:53.634386464 +0000 UTC m=+154.402558073 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.235080 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.235550 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:53.735528015 +0000 UTC m=+154.503699624 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.297473 4941 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.336747 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.337071 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:53.837050317 +0000 UTC m=+154.605221926 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.438292 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.438784 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:53.938764167 +0000 UTC m=+154.706935776 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.539634 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.539835 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:54.039800135 +0000 UTC m=+154.807971734 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.539982 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.540423 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:54.040412594 +0000 UTC m=+154.808584193 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.641173 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.641451 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:54.141404901 +0000 UTC m=+154.909576510 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.641539 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.641936 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:54.141918376 +0000 UTC m=+154.910089985 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.689953 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-jfj9x" event={"ID":"ef794854-78b3-48a6-8fa0-3d2bd613ecd2","Type":"ContainerStarted","Data":"8c4f8e9db5850c48dd3f38d1b9e27111c68885bc648b2106795f2df31d179948"} Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.712416 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-4rj4n" event={"ID":"f90130b9-9551-4823-9397-48b583729552","Type":"ContainerStarted","Data":"a59fd133e765e50076e5b94304c1453745febe64fc8dfff6816fb3a9f75689b1"} Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.731518 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-qhxrz" event={"ID":"c4234d88-5b46-45ed-8a17-ac586277459f","Type":"ContainerStarted","Data":"46888ca29239befe92e4706dedd25bd75a95efed962f49070aa3c3c9e2ea0028"} Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.743360 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.743551 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:54.243517691 +0000 UTC m=+155.011689300 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.743779 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.744978 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-30 06:48:54.244968887 +0000 UTC m=+155.013140496 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.745104 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-46p7c" event={"ID":"de02b605-6b0f-476b-9df3-fd41e3a320a2","Type":"ContainerStarted","Data":"8886c3aaab4a05c847f97cb62a1a7246608f41a95c883bfa743e50677340a826"} Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.745167 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-46p7c" event={"ID":"de02b605-6b0f-476b-9df3-fd41e3a320a2","Type":"ContainerStarted","Data":"ad091cb4724588fcf20e2f3e624832dd781c5c2717a336705807a9033a3c7142"} Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.756349 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-st8vq" event={"ID":"e6486e25-f042-42ac-a9e4-8a399bf9b414","Type":"ContainerStarted","Data":"b47595d13dfb8c61a7b88e0d7a74e9b071b9bb0dcf7d6f45b556d09fda38c22e"} Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.756498 4941 patch_prober.go:28] interesting pod/downloads-7954f5f757-vkq5m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.756570 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vkq5m" podUID="542c0442-1fb2-4e3e-ba06-a14526cf98ce" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.757439 4941 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-vw95c container/marketplace-operator 
namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" start-of-body= Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.757483 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" podUID="43fead9f-0d7a-4d82-8822-b4e83849d4ad" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.31:8080/healthz\": dial tcp 10.217.0.31:8080: connect: connection refused" Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.759172 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-st8vq" Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.767903 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-g9klz" Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.829716 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-jfj9x" podStartSLOduration=132.829693675 podStartE2EDuration="2m12.829693675s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:53.723660061 +0000 UTC m=+154.491831670" watchObservedRunningTime="2025-11-30 06:48:53.829693675 +0000 UTC m=+154.597865284" Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.845215 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.845386 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-30 06:48:54.345362965 +0000 UTC m=+155.113534574 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.845433 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:53 crc kubenswrapper[4941]: E1130 06:48:53.845922 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-30 06:48:54.345914692 +0000 UTC m=+155.114086301 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-krsrr" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.861843 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-st8vq" podStartSLOduration=9.861819779 podStartE2EDuration="9.861819779s" podCreationTimestamp="2025-11-30 06:48:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:53.831109749 +0000 UTC m=+154.599281358" watchObservedRunningTime="2025-11-30 06:48:53.861819779 +0000 UTC m=+154.629991388" Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.881276 4941 patch_prober.go:28] interesting pod/router-default-5444994796-rwj5c container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 30 06:48:53 crc kubenswrapper[4941]: [-]has-synced failed: reason withheld Nov 30 06:48:53 crc kubenswrapper[4941]: [+]process-running ok Nov 30 06:48:53 crc kubenswrapper[4941]: healthz check failed Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.881399 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rwj5c" podUID="fb91f88b-4fc5-464d-ac96-88b0828e67c0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.891514 4941 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-11-30T06:48:53.297515542Z","Handler":null,"Name":""} Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.901224 4941 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.901276 4941 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.946262 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 30 06:48:53 crc kubenswrapper[4941]: I1130 06:48:53.955720 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.049274 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.111188 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.111245 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.143735 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-krsrr\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.373130 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.496383 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sx2h8"] Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.497662 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.504706 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.506008 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sx2h8"] Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.557975 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-catalog-content\") pod \"certified-operators-sx2h8\" (UID: \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\") " pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.558053 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-utilities\") pod \"certified-operators-sx2h8\" (UID: \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\") " pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.558100 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5nsw\" (UniqueName: \"kubernetes.io/projected/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-kube-api-access-n5nsw\") pod \"certified-operators-sx2h8\" (UID: \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\") " pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.627548 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krsrr"] Nov 30 06:48:54 crc kubenswrapper[4941]: W1130 06:48:54.639014 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52150dbc_4724_4cc3_a326_5caaae27246e.slice/crio-51241397ad6bb77321e81922004a201d0dde6bbb061c389bded0a08daa4347d0 WatchSource:0}: Error finding container 51241397ad6bb77321e81922004a201d0dde6bbb061c389bded0a08daa4347d0: Status 404 returned error can't find the container with id 51241397ad6bb77321e81922004a201d0dde6bbb061c389bded0a08daa4347d0 Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.659012 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-catalog-content\") pod \"certified-operators-sx2h8\" (UID: \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\") " pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.659072 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-utilities\") pod \"certified-operators-sx2h8\" (UID: \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\") " pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.659104 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5nsw\" (UniqueName: \"kubernetes.io/projected/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-kube-api-access-n5nsw\") pod \"certified-operators-sx2h8\" (UID: \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\") " 
pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.659516 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-utilities\") pod \"certified-operators-sx2h8\" (UID: \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\") " pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.659658 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-catalog-content\") pod \"certified-operators-sx2h8\" (UID: \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\") " pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.682711 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5nsw\" (UniqueName: \"kubernetes.io/projected/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-kube-api-access-n5nsw\") pod \"certified-operators-sx2h8\" (UID: \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\") " pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.687029 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h5dr7"] Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.688044 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.694624 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.710869 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h5dr7"] Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.766250 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phgxc\" (UniqueName: \"kubernetes.io/projected/295ff105-17f2-40dc-96ab-8cc8b03031af-kube-api-access-phgxc\") pod \"community-operators-h5dr7\" (UID: \"295ff105-17f2-40dc-96ab-8cc8b03031af\") " pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.766372 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/295ff105-17f2-40dc-96ab-8cc8b03031af-catalog-content\") pod \"community-operators-h5dr7\" (UID: \"295ff105-17f2-40dc-96ab-8cc8b03031af\") " pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.766446 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/295ff105-17f2-40dc-96ab-8cc8b03031af-utilities\") pod \"community-operators-h5dr7\" (UID: \"295ff105-17f2-40dc-96ab-8cc8b03031af\") " pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.772264 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-46p7c" event={"ID":"de02b605-6b0f-476b-9df3-fd41e3a320a2","Type":"ContainerStarted","Data":"baa13e6b2e4b847d6f019ddc0e3ce9dff93e4db9925ea6ed9df553d30b422a72"} Nov 30 06:48:54 crc 
kubenswrapper[4941]: I1130 06:48:54.775109 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" event={"ID":"52150dbc-4724-4cc3-a326-5caaae27246e","Type":"ContainerStarted","Data":"51241397ad6bb77321e81922004a201d0dde6bbb061c389bded0a08daa4347d0"} Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.782132 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.801859 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-46p7c" podStartSLOduration=10.801832489 podStartE2EDuration="10.801832489s" podCreationTimestamp="2025-11-30 06:48:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:54.789212185 +0000 UTC m=+155.557383794" watchObservedRunningTime="2025-11-30 06:48:54.801832489 +0000 UTC m=+155.570004108" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.831135 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.868215 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/295ff105-17f2-40dc-96ab-8cc8b03031af-catalog-content\") pod \"community-operators-h5dr7\" (UID: \"295ff105-17f2-40dc-96ab-8cc8b03031af\") " pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.868315 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/295ff105-17f2-40dc-96ab-8cc8b03031af-utilities\") pod \"community-operators-h5dr7\" (UID: \"295ff105-17f2-40dc-96ab-8cc8b03031af\") " pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.868562 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phgxc\" (UniqueName: \"kubernetes.io/projected/295ff105-17f2-40dc-96ab-8cc8b03031af-kube-api-access-phgxc\") pod \"community-operators-h5dr7\" (UID: \"295ff105-17f2-40dc-96ab-8cc8b03031af\") " pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.871021 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/295ff105-17f2-40dc-96ab-8cc8b03031af-catalog-content\") pod \"community-operators-h5dr7\" (UID: \"295ff105-17f2-40dc-96ab-8cc8b03031af\") " pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.871271 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/295ff105-17f2-40dc-96ab-8cc8b03031af-utilities\") pod \"community-operators-h5dr7\" (UID: \"295ff105-17f2-40dc-96ab-8cc8b03031af\") " pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.878624 4941 patch_prober.go:28] interesting pod/router-default-5444994796-rwj5c container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld 
Nov 30 06:48:54 crc kubenswrapper[4941]: [-]has-synced failed: reason withheld Nov 30 06:48:54 crc kubenswrapper[4941]: [+]process-running ok Nov 30 06:48:54 crc kubenswrapper[4941]: healthz check failed Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.878712 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rwj5c" podUID="fb91f88b-4fc5-464d-ac96-88b0828e67c0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.891215 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zsjd4"] Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.894387 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phgxc\" (UniqueName: \"kubernetes.io/projected/295ff105-17f2-40dc-96ab-8cc8b03031af-kube-api-access-phgxc\") pod \"community-operators-h5dr7\" (UID: \"295ff105-17f2-40dc-96ab-8cc8b03031af\") " pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.895673 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zsjd4" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.896903 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zsjd4"] Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.969301 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gct7q\" (UniqueName: \"kubernetes.io/projected/a60b8750-10a6-40dc-8aea-d3f3f95a6597-kube-api-access-gct7q\") pod \"certified-operators-zsjd4\" (UID: \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\") " pod="openshift-marketplace/certified-operators-zsjd4" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.969760 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a60b8750-10a6-40dc-8aea-d3f3f95a6597-catalog-content\") pod \"certified-operators-zsjd4\" (UID: \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\") " pod="openshift-marketplace/certified-operators-zsjd4" Nov 30 06:48:54 crc kubenswrapper[4941]: I1130 06:48:54.969945 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a60b8750-10a6-40dc-8aea-d3f3f95a6597-utilities\") pod \"certified-operators-zsjd4\" (UID: \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\") " pod="openshift-marketplace/certified-operators-zsjd4" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.056996 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sx2h8"] Nov 30 06:48:55 crc kubenswrapper[4941]: W1130 06:48:55.066766 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4348bd9e_d48d_41a7_9cbc_14620a7a8aa5.slice/crio-cc991cdbb0df7046a80133cc78023f37f982bb527899296ca7689a37cff581bb WatchSource:0}: Error finding container cc991cdbb0df7046a80133cc78023f37f982bb527899296ca7689a37cff581bb: Status 404 returned error can't find the container with id cc991cdbb0df7046a80133cc78023f37f982bb527899296ca7689a37cff581bb Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.070576 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-gct7q\" (UniqueName: \"kubernetes.io/projected/a60b8750-10a6-40dc-8aea-d3f3f95a6597-kube-api-access-gct7q\") pod \"certified-operators-zsjd4\" (UID: \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\") " pod="openshift-marketplace/certified-operators-zsjd4" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.070794 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a60b8750-10a6-40dc-8aea-d3f3f95a6597-catalog-content\") pod \"certified-operators-zsjd4\" (UID: \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\") " pod="openshift-marketplace/certified-operators-zsjd4" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.071003 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a60b8750-10a6-40dc-8aea-d3f3f95a6597-utilities\") pod \"certified-operators-zsjd4\" (UID: \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\") " pod="openshift-marketplace/certified-operators-zsjd4" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.071255 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a60b8750-10a6-40dc-8aea-d3f3f95a6597-catalog-content\") pod \"certified-operators-zsjd4\" (UID: \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\") " pod="openshift-marketplace/certified-operators-zsjd4" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.071513 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a60b8750-10a6-40dc-8aea-d3f3f95a6597-utilities\") pod \"certified-operators-zsjd4\" (UID: \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\") " pod="openshift-marketplace/certified-operators-zsjd4" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.102294 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gct7q\" (UniqueName: \"kubernetes.io/projected/a60b8750-10a6-40dc-8aea-d3f3f95a6597-kube-api-access-gct7q\") pod \"certified-operators-zsjd4\" (UID: \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\") " pod="openshift-marketplace/certified-operators-zsjd4" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.105731 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-j7rpb"] Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.108458 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j7rpb" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.120652 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j7rpb"] Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.122109 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.214549 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zsjd4" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.283896 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596db448-f0db-4e85-bb61-465962d12a40-catalog-content\") pod \"community-operators-j7rpb\" (UID: \"596db448-f0db-4e85-bb61-465962d12a40\") " pod="openshift-marketplace/community-operators-j7rpb" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.283937 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596db448-f0db-4e85-bb61-465962d12a40-utilities\") pod \"community-operators-j7rpb\" (UID: \"596db448-f0db-4e85-bb61-465962d12a40\") " pod="openshift-marketplace/community-operators-j7rpb" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.283954 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9s6j\" (UniqueName: \"kubernetes.io/projected/596db448-f0db-4e85-bb61-465962d12a40-kube-api-access-x9s6j\") pod \"community-operators-j7rpb\" (UID: \"596db448-f0db-4e85-bb61-465962d12a40\") " pod="openshift-marketplace/community-operators-j7rpb" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.365293 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h5dr7"] Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.384771 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596db448-f0db-4e85-bb61-465962d12a40-catalog-content\") pod \"community-operators-j7rpb\" (UID: \"596db448-f0db-4e85-bb61-465962d12a40\") " pod="openshift-marketplace/community-operators-j7rpb" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.384810 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596db448-f0db-4e85-bb61-465962d12a40-utilities\") pod \"community-operators-j7rpb\" (UID: \"596db448-f0db-4e85-bb61-465962d12a40\") " pod="openshift-marketplace/community-operators-j7rpb" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.384826 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9s6j\" (UniqueName: \"kubernetes.io/projected/596db448-f0db-4e85-bb61-465962d12a40-kube-api-access-x9s6j\") pod \"community-operators-j7rpb\" (UID: \"596db448-f0db-4e85-bb61-465962d12a40\") " pod="openshift-marketplace/community-operators-j7rpb" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.385589 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596db448-f0db-4e85-bb61-465962d12a40-utilities\") pod \"community-operators-j7rpb\" (UID: \"596db448-f0db-4e85-bb61-465962d12a40\") " pod="openshift-marketplace/community-operators-j7rpb" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.385866 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596db448-f0db-4e85-bb61-465962d12a40-catalog-content\") pod \"community-operators-j7rpb\" (UID: \"596db448-f0db-4e85-bb61-465962d12a40\") " pod="openshift-marketplace/community-operators-j7rpb" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.405379 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-x9s6j\" (UniqueName: \"kubernetes.io/projected/596db448-f0db-4e85-bb61-465962d12a40-kube-api-access-x9s6j\") pod \"community-operators-j7rpb\" (UID: \"596db448-f0db-4e85-bb61-465962d12a40\") " pod="openshift-marketplace/community-operators-j7rpb" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.439765 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zsjd4"] Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.495400 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j7rpb" Nov 30 06:48:55 crc kubenswrapper[4941]: W1130 06:48:55.513291 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda60b8750_10a6_40dc_8aea_d3f3f95a6597.slice/crio-7b3bbcd895ba6eac74e8359497dc383518063aa9ee812ae386f34ed854bf7bd4 WatchSource:0}: Error finding container 7b3bbcd895ba6eac74e8359497dc383518063aa9ee812ae386f34ed854bf7bd4: Status 404 returned error can't find the container with id 7b3bbcd895ba6eac74e8359497dc383518063aa9ee812ae386f34ed854bf7bd4 Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.530654 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.728462 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j7rpb"] Nov 30 06:48:55 crc kubenswrapper[4941]: W1130 06:48:55.757144 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod596db448_f0db_4e85_bb61_465962d12a40.slice/crio-1cc232e82659ceef4b4b7eb5d34a71544d761fa5803f33a33d3eb9d6bc6e1f85 WatchSource:0}: Error finding container 1cc232e82659ceef4b4b7eb5d34a71544d761fa5803f33a33d3eb9d6bc6e1f85: Status 404 returned error can't find the container with id 1cc232e82659ceef4b4b7eb5d34a71544d761fa5803f33a33d3eb9d6bc6e1f85 Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.780373 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7rpb" event={"ID":"596db448-f0db-4e85-bb61-465962d12a40","Type":"ContainerStarted","Data":"1cc232e82659ceef4b4b7eb5d34a71544d761fa5803f33a33d3eb9d6bc6e1f85"} Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.784162 4941 generic.go:334] "Generic (PLEG): container finished" podID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" containerID="284bce6c4dfec5a10c835d6bf079b749657214e484f5cf82bf410c2243c2af21" exitCode=0 Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.784254 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sx2h8" event={"ID":"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5","Type":"ContainerDied","Data":"284bce6c4dfec5a10c835d6bf079b749657214e484f5cf82bf410c2243c2af21"} Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.784352 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sx2h8" event={"ID":"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5","Type":"ContainerStarted","Data":"cc991cdbb0df7046a80133cc78023f37f982bb527899296ca7689a37cff581bb"} Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.786227 4941 provider.go:102] Refreshing cache for provider: 
*credentialprovider.defaultDockerConfigProvider Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.786660 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" event={"ID":"52150dbc-4724-4cc3-a326-5caaae27246e","Type":"ContainerStarted","Data":"534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978"} Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.786809 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.789703 4941 generic.go:334] "Generic (PLEG): container finished" podID="295ff105-17f2-40dc-96ab-8cc8b03031af" containerID="90993c53316a955956df4dff7b571c70236e1769cd3f0b34a43e4e08cca35bb2" exitCode=0 Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.789771 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5dr7" event={"ID":"295ff105-17f2-40dc-96ab-8cc8b03031af","Type":"ContainerDied","Data":"90993c53316a955956df4dff7b571c70236e1769cd3f0b34a43e4e08cca35bb2"} Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.789797 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5dr7" event={"ID":"295ff105-17f2-40dc-96ab-8cc8b03031af","Type":"ContainerStarted","Data":"b19befbc3c0b5fac8c31a702e63e6729a24798bbf3567793e0a8bce887d297d6"} Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.792032 4941 generic.go:334] "Generic (PLEG): container finished" podID="a60b8750-10a6-40dc-8aea-d3f3f95a6597" containerID="9d7697434fc4dde2e1259414ced777a5a1b0e4f26597fddf0823b533402d584e" exitCode=0 Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.792564 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zsjd4" event={"ID":"a60b8750-10a6-40dc-8aea-d3f3f95a6597","Type":"ContainerDied","Data":"9d7697434fc4dde2e1259414ced777a5a1b0e4f26597fddf0823b533402d584e"} Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.792587 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zsjd4" event={"ID":"a60b8750-10a6-40dc-8aea-d3f3f95a6597","Type":"ContainerStarted","Data":"7b3bbcd895ba6eac74e8359497dc383518063aa9ee812ae386f34ed854bf7bd4"} Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.832534 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" podStartSLOduration=134.832506142 podStartE2EDuration="2m14.832506142s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:55.824066448 +0000 UTC m=+156.592238057" watchObservedRunningTime="2025-11-30 06:48:55.832506142 +0000 UTC m=+156.600677751" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.879448 4941 patch_prober.go:28] interesting pod/router-default-5444994796-rwj5c container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 30 06:48:55 crc kubenswrapper[4941]: [-]has-synced failed: reason withheld Nov 30 06:48:55 crc kubenswrapper[4941]: [+]process-running ok Nov 30 06:48:55 crc kubenswrapper[4941]: healthz check failed Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.879565 4941 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rwj5c" podUID="fb91f88b-4fc5-464d-ac96-88b0828e67c0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.883649 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.890141 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-frzl6" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.922552 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.922968 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:55 crc kubenswrapper[4941]: I1130 06:48:55.947476 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.269210 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.270284 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.272916 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.273203 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.279204 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.340556 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8d66150d-6ee6-469f-a9f6-6820b4cc6df3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8d66150d-6ee6-469f-a9f6-6820b4cc6df3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.340703 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8d66150d-6ee6-469f-a9f6-6820b4cc6df3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8d66150d-6ee6-469f-a9f6-6820b4cc6df3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.441616 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8d66150d-6ee6-469f-a9f6-6820b4cc6df3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8d66150d-6ee6-469f-a9f6-6820b4cc6df3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.441737 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" 
(UniqueName: \"kubernetes.io/host-path/8d66150d-6ee6-469f-a9f6-6820b4cc6df3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8d66150d-6ee6-469f-a9f6-6820b4cc6df3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.441836 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8d66150d-6ee6-469f-a9f6-6820b4cc6df3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8d66150d-6ee6-469f-a9f6-6820b4cc6df3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.463810 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8d66150d-6ee6-469f-a9f6-6820b4cc6df3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8d66150d-6ee6-469f-a9f6-6820b4cc6df3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.490868 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9lzhs"] Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.492004 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.497610 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.505280 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9lzhs"] Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.542881 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-catalog-content\") pod \"redhat-marketplace-9lzhs\" (UID: \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\") " pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.542936 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zmsw\" (UniqueName: \"kubernetes.io/projected/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-kube-api-access-9zmsw\") pod \"redhat-marketplace-9lzhs\" (UID: \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\") " pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.543010 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-utilities\") pod \"redhat-marketplace-9lzhs\" (UID: \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\") " pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.644453 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-utilities\") pod \"redhat-marketplace-9lzhs\" (UID: \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\") " pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.644747 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-catalog-content\") pod \"redhat-marketplace-9lzhs\" (UID: \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\") " pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.644834 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zmsw\" (UniqueName: \"kubernetes.io/projected/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-kube-api-access-9zmsw\") pod \"redhat-marketplace-9lzhs\" (UID: \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\") " pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.644923 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-utilities\") pod \"redhat-marketplace-9lzhs\" (UID: \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\") " pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.645404 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-catalog-content\") pod \"redhat-marketplace-9lzhs\" (UID: \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\") " pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.649318 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.675225 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zmsw\" (UniqueName: \"kubernetes.io/projected/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-kube-api-access-9zmsw\") pod \"redhat-marketplace-9lzhs\" (UID: \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\") " pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.810010 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.819392 4941 generic.go:334] "Generic (PLEG): container finished" podID="596db448-f0db-4e85-bb61-465962d12a40" containerID="b6a1ed38b52fe412cf7743a82b3e8f7f5ad6c73e6e2ab121fe5fcbd7cf24e879" exitCode=0 Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.819468 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7rpb" event={"ID":"596db448-f0db-4e85-bb61-465962d12a40","Type":"ContainerDied","Data":"b6a1ed38b52fe412cf7743a82b3e8f7f5ad6c73e6e2ab121fe5fcbd7cf24e879"} Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.843207 4941 generic.go:334] "Generic (PLEG): container finished" podID="773d34d4-1723-42c4-8b83-43f629b630c2" containerID="cb07af51b3105ae67f0fe232c46fd6912a357c344881a250b64edafdf6ae76ab" exitCode=0 Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.844246 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" event={"ID":"773d34d4-1723-42c4-8b83-43f629b630c2","Type":"ContainerDied","Data":"cb07af51b3105ae67f0fe232c46fd6912a357c344881a250b64edafdf6ae76ab"} Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.855573 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-rb8kg" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.884268 4941 patch_prober.go:28] interesting pod/router-default-5444994796-rwj5c container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 30 06:48:56 crc kubenswrapper[4941]: [-]has-synced failed: reason withheld Nov 30 06:48:56 crc kubenswrapper[4941]: [+]process-running ok Nov 30 06:48:56 crc kubenswrapper[4941]: healthz check failed Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.884715 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rwj5c" podUID="fb91f88b-4fc5-464d-ac96-88b0828e67c0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.892834 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hhb7m"] Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.895011 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hhb7m" Nov 30 06:48:56 crc kubenswrapper[4941]: I1130 06:48:56.933782 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hhb7m"] Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.058107 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe26a6e1-683c-4afa-8e12-454e55a8e950-utilities\") pod \"redhat-marketplace-hhb7m\" (UID: \"fe26a6e1-683c-4afa-8e12-454e55a8e950\") " pod="openshift-marketplace/redhat-marketplace-hhb7m" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.058159 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe26a6e1-683c-4afa-8e12-454e55a8e950-catalog-content\") pod \"redhat-marketplace-hhb7m\" (UID: \"fe26a6e1-683c-4afa-8e12-454e55a8e950\") " pod="openshift-marketplace/redhat-marketplace-hhb7m" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.058243 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bvvc\" (UniqueName: \"kubernetes.io/projected/fe26a6e1-683c-4afa-8e12-454e55a8e950-kube-api-access-4bvvc\") pod \"redhat-marketplace-hhb7m\" (UID: \"fe26a6e1-683c-4afa-8e12-454e55a8e950\") " pod="openshift-marketplace/redhat-marketplace-hhb7m" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.082569 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.160090 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bvvc\" (UniqueName: \"kubernetes.io/projected/fe26a6e1-683c-4afa-8e12-454e55a8e950-kube-api-access-4bvvc\") pod \"redhat-marketplace-hhb7m\" (UID: \"fe26a6e1-683c-4afa-8e12-454e55a8e950\") " pod="openshift-marketplace/redhat-marketplace-hhb7m" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.160180 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe26a6e1-683c-4afa-8e12-454e55a8e950-utilities\") pod \"redhat-marketplace-hhb7m\" (UID: \"fe26a6e1-683c-4afa-8e12-454e55a8e950\") " pod="openshift-marketplace/redhat-marketplace-hhb7m" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.160210 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe26a6e1-683c-4afa-8e12-454e55a8e950-catalog-content\") pod \"redhat-marketplace-hhb7m\" (UID: \"fe26a6e1-683c-4afa-8e12-454e55a8e950\") " pod="openshift-marketplace/redhat-marketplace-hhb7m" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.160788 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe26a6e1-683c-4afa-8e12-454e55a8e950-utilities\") pod \"redhat-marketplace-hhb7m\" (UID: \"fe26a6e1-683c-4afa-8e12-454e55a8e950\") " pod="openshift-marketplace/redhat-marketplace-hhb7m" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.160877 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe26a6e1-683c-4afa-8e12-454e55a8e950-catalog-content\") pod \"redhat-marketplace-hhb7m\" (UID: 
\"fe26a6e1-683c-4afa-8e12-454e55a8e950\") " pod="openshift-marketplace/redhat-marketplace-hhb7m" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.187538 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bvvc\" (UniqueName: \"kubernetes.io/projected/fe26a6e1-683c-4afa-8e12-454e55a8e950-kube-api-access-4bvvc\") pod \"redhat-marketplace-hhb7m\" (UID: \"fe26a6e1-683c-4afa-8e12-454e55a8e950\") " pod="openshift-marketplace/redhat-marketplace-hhb7m" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.205688 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9lzhs"] Nov 30 06:48:57 crc kubenswrapper[4941]: W1130 06:48:57.218776 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7647c27f_b440_4d1c_8e6f_ffa56aa08a3f.slice/crio-27f7fbaa065bbd7f39f85e4ee5e98c3468595e973206ffea8530006bdcf9e6f5 WatchSource:0}: Error finding container 27f7fbaa065bbd7f39f85e4ee5e98c3468595e973206ffea8530006bdcf9e6f5: Status 404 returned error can't find the container with id 27f7fbaa065bbd7f39f85e4ee5e98c3468595e973206ffea8530006bdcf9e6f5 Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.225958 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hhb7m" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.232200 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.232315 4941 patch_prober.go:28] interesting pod/downloads-7954f5f757-vkq5m container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.232406 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-vkq5m" podUID="542c0442-1fb2-4e3e-ba06-a14526cf98ce" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.232388 4941 patch_prober.go:28] interesting pod/downloads-7954f5f757-vkq5m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.232511 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-vkq5m" podUID="542c0442-1fb2-4e3e-ba06-a14526cf98ce" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.21:8080/\": dial tcp 10.217.0.21:8080: connect: connection refused" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.232619 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-css84" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.234689 4941 patch_prober.go:28] interesting pod/console-f9d7485db-css84 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.234767 4941 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-css84" podUID="df9aa967-eec9-4ce5-9c64-edff3aedca4a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.7:8443/health\": dial tcp 10.217.0.7:8443: connect: connection refused" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.566069 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hhb7m"] Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.681948 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dzm96"] Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.683202 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.689746 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.707927 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dzm96"] Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.780067 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a44f18a4-b76a-48fa-b2e9-df18115674d3-utilities\") pod \"redhat-operators-dzm96\" (UID: \"a44f18a4-b76a-48fa-b2e9-df18115674d3\") " pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.781221 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a44f18a4-b76a-48fa-b2e9-df18115674d3-catalog-content\") pod \"redhat-operators-dzm96\" (UID: \"a44f18a4-b76a-48fa-b2e9-df18115674d3\") " pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.781306 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hvvf\" (UniqueName: \"kubernetes.io/projected/a44f18a4-b76a-48fa-b2e9-df18115674d3-kube-api-access-6hvvf\") pod \"redhat-operators-dzm96\" (UID: \"a44f18a4-b76a-48fa-b2e9-df18115674d3\") " pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.874589 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.880036 4941 generic.go:334] "Generic (PLEG): container finished" podID="fe26a6e1-683c-4afa-8e12-454e55a8e950" containerID="9c82a8e97a95c88ea99448ae35c8a31321f662baf212b93d05541597250ac65c" exitCode=0 Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.880282 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hhb7m" event={"ID":"fe26a6e1-683c-4afa-8e12-454e55a8e950","Type":"ContainerDied","Data":"9c82a8e97a95c88ea99448ae35c8a31321f662baf212b93d05541597250ac65c"} Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.880368 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hhb7m" event={"ID":"fe26a6e1-683c-4afa-8e12-454e55a8e950","Type":"ContainerStarted","Data":"3c8bd2403fca4f6118c30b7e4c088617bb61773d1764730999f19aba1d3f716d"} Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 
06:48:57.882003 4941 patch_prober.go:28] interesting pod/router-default-5444994796-rwj5c container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 30 06:48:57 crc kubenswrapper[4941]: [-]has-synced failed: reason withheld Nov 30 06:48:57 crc kubenswrapper[4941]: [+]process-running ok Nov 30 06:48:57 crc kubenswrapper[4941]: healthz check failed Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.882129 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rwj5c" podUID="fb91f88b-4fc5-464d-ac96-88b0828e67c0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.884142 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a44f18a4-b76a-48fa-b2e9-df18115674d3-catalog-content\") pod \"redhat-operators-dzm96\" (UID: \"a44f18a4-b76a-48fa-b2e9-df18115674d3\") " pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.884178 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hvvf\" (UniqueName: \"kubernetes.io/projected/a44f18a4-b76a-48fa-b2e9-df18115674d3-kube-api-access-6hvvf\") pod \"redhat-operators-dzm96\" (UID: \"a44f18a4-b76a-48fa-b2e9-df18115674d3\") " pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.884266 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a44f18a4-b76a-48fa-b2e9-df18115674d3-utilities\") pod \"redhat-operators-dzm96\" (UID: \"a44f18a4-b76a-48fa-b2e9-df18115674d3\") " pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.884841 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a44f18a4-b76a-48fa-b2e9-df18115674d3-utilities\") pod \"redhat-operators-dzm96\" (UID: \"a44f18a4-b76a-48fa-b2e9-df18115674d3\") " pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.886208 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a44f18a4-b76a-48fa-b2e9-df18115674d3-catalog-content\") pod \"redhat-operators-dzm96\" (UID: \"a44f18a4-b76a-48fa-b2e9-df18115674d3\") " pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.889600 4941 generic.go:334] "Generic (PLEG): container finished" podID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" containerID="ec3c93357795ff7d7e1fa7e47dc4e2a6285e98a4c5bea560fda93297d71a6f09" exitCode=0 Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.889708 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9lzhs" event={"ID":"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f","Type":"ContainerDied","Data":"ec3c93357795ff7d7e1fa7e47dc4e2a6285e98a4c5bea560fda93297d71a6f09"} Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.889774 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9lzhs" 
event={"ID":"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f","Type":"ContainerStarted","Data":"27f7fbaa065bbd7f39f85e4ee5e98c3468595e973206ffea8530006bdcf9e6f5"} Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.909566 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8d66150d-6ee6-469f-a9f6-6820b4cc6df3","Type":"ContainerStarted","Data":"5086f65d18d86b22171151455f9ef11fa2fdfefcf0d47a51a69c2ad9973fb781"} Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.937360 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hvvf\" (UniqueName: \"kubernetes.io/projected/a44f18a4-b76a-48fa-b2e9-df18115674d3-kube-api-access-6hvvf\") pod \"redhat-operators-dzm96\" (UID: \"a44f18a4-b76a-48fa-b2e9-df18115674d3\") " pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:48:57 crc kubenswrapper[4941]: I1130 06:48:57.985770 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.985747111 podStartE2EDuration="1.985747111s" podCreationTimestamp="2025-11-30 06:48:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:48:57.957284981 +0000 UTC m=+158.725456590" watchObservedRunningTime="2025-11-30 06:48:57.985747111 +0000 UTC m=+158.753918720" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.057689 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.094062 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nwk4x"] Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.095270 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nwk4x" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.104532 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nwk4x"] Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.189275 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea77dd7e-611a-4a66-8ae6-8f45472ea609-utilities\") pod \"redhat-operators-nwk4x\" (UID: \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\") " pod="openshift-marketplace/redhat-operators-nwk4x" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.189385 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea77dd7e-611a-4a66-8ae6-8f45472ea609-catalog-content\") pod \"redhat-operators-nwk4x\" (UID: \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\") " pod="openshift-marketplace/redhat-operators-nwk4x" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.189429 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsmtv\" (UniqueName: \"kubernetes.io/projected/ea77dd7e-611a-4a66-8ae6-8f45472ea609-kube-api-access-dsmtv\") pod \"redhat-operators-nwk4x\" (UID: \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\") " pod="openshift-marketplace/redhat-operators-nwk4x" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.272091 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.290396 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea77dd7e-611a-4a66-8ae6-8f45472ea609-catalog-content\") pod \"redhat-operators-nwk4x\" (UID: \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\") " pod="openshift-marketplace/redhat-operators-nwk4x" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.290444 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsmtv\" (UniqueName: \"kubernetes.io/projected/ea77dd7e-611a-4a66-8ae6-8f45472ea609-kube-api-access-dsmtv\") pod \"redhat-operators-nwk4x\" (UID: \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\") " pod="openshift-marketplace/redhat-operators-nwk4x" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.290488 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea77dd7e-611a-4a66-8ae6-8f45472ea609-utilities\") pod \"redhat-operators-nwk4x\" (UID: \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\") " pod="openshift-marketplace/redhat-operators-nwk4x" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.291481 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea77dd7e-611a-4a66-8ae6-8f45472ea609-utilities\") pod \"redhat-operators-nwk4x\" (UID: \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\") " pod="openshift-marketplace/redhat-operators-nwk4x" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.291710 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea77dd7e-611a-4a66-8ae6-8f45472ea609-catalog-content\") pod \"redhat-operators-nwk4x\" (UID: \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\") " pod="openshift-marketplace/redhat-operators-nwk4x" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.314760 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsmtv\" (UniqueName: \"kubernetes.io/projected/ea77dd7e-611a-4a66-8ae6-8f45472ea609-kube-api-access-dsmtv\") pod \"redhat-operators-nwk4x\" (UID: \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\") " pod="openshift-marketplace/redhat-operators-nwk4x" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.391445 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dzm96"] Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.391816 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/773d34d4-1723-42c4-8b83-43f629b630c2-secret-volume\") pod \"773d34d4-1723-42c4-8b83-43f629b630c2\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.392006 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qc84p\" (UniqueName: \"kubernetes.io/projected/773d34d4-1723-42c4-8b83-43f629b630c2-kube-api-access-qc84p\") pod \"773d34d4-1723-42c4-8b83-43f629b630c2\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.392073 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/773d34d4-1723-42c4-8b83-43f629b630c2-config-volume\") pod \"773d34d4-1723-42c4-8b83-43f629b630c2\" (UID: \"773d34d4-1723-42c4-8b83-43f629b630c2\") " Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.393313 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/773d34d4-1723-42c4-8b83-43f629b630c2-config-volume" (OuterVolumeSpecName: "config-volume") pod "773d34d4-1723-42c4-8b83-43f629b630c2" (UID: "773d34d4-1723-42c4-8b83-43f629b630c2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.397257 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/773d34d4-1723-42c4-8b83-43f629b630c2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "773d34d4-1723-42c4-8b83-43f629b630c2" (UID: "773d34d4-1723-42c4-8b83-43f629b630c2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.397683 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/773d34d4-1723-42c4-8b83-43f629b630c2-kube-api-access-qc84p" (OuterVolumeSpecName: "kube-api-access-qc84p") pod "773d34d4-1723-42c4-8b83-43f629b630c2" (UID: "773d34d4-1723-42c4-8b83-43f629b630c2"). InnerVolumeSpecName "kube-api-access-qc84p". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.419153 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nwk4x" Nov 30 06:48:58 crc kubenswrapper[4941]: W1130 06:48:58.422207 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda44f18a4_b76a_48fa_b2e9_df18115674d3.slice/crio-138568fa5675ad5235be2cd91904159aca59671b617ce3e54128341b4e96a8e8 WatchSource:0}: Error finding container 138568fa5675ad5235be2cd91904159aca59671b617ce3e54128341b4e96a8e8: Status 404 returned error can't find the container with id 138568fa5675ad5235be2cd91904159aca59671b617ce3e54128341b4e96a8e8 Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.497210 4941 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/773d34d4-1723-42c4-8b83-43f629b630c2-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.497245 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qc84p\" (UniqueName: \"kubernetes.io/projected/773d34d4-1723-42c4-8b83-43f629b630c2-kube-api-access-qc84p\") on node \"crc\" DevicePath \"\"" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.497254 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/773d34d4-1723-42c4-8b83-43f629b630c2-config-volume\") on node \"crc\" DevicePath \"\"" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.880485 4941 patch_prober.go:28] interesting pod/router-default-5444994796-rwj5c container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 30 06:48:58 crc kubenswrapper[4941]: [-]has-synced failed: reason withheld Nov 30 06:48:58 crc kubenswrapper[4941]: [+]process-running ok Nov 30 06:48:58 crc kubenswrapper[4941]: healthz 
check failed Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.881457 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rwj5c" podUID="fb91f88b-4fc5-464d-ac96-88b0828e67c0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.902084 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nwk4x"] Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.951137 4941 generic.go:334] "Generic (PLEG): container finished" podID="8d66150d-6ee6-469f-a9f6-6820b4cc6df3" containerID="3fa1c4af44ab842b4bd7e15ce238cab3843e092dd0367b5f75087d6ab7a2eb6c" exitCode=0 Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.951257 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8d66150d-6ee6-469f-a9f6-6820b4cc6df3","Type":"ContainerDied","Data":"3fa1c4af44ab842b4bd7e15ce238cab3843e092dd0367b5f75087d6ab7a2eb6c"} Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.966126 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" event={"ID":"773d34d4-1723-42c4-8b83-43f629b630c2","Type":"ContainerDied","Data":"39e4a45189d9d9ce72e70cbefe3545414bed94e52da2d5c350e73f030498a1bb"} Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.966187 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.966189 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39e4a45189d9d9ce72e70cbefe3545414bed94e52da2d5c350e73f030498a1bb" Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.985772 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzm96" event={"ID":"a44f18a4-b76a-48fa-b2e9-df18115674d3","Type":"ContainerStarted","Data":"d01abf5371fe6bacbd9b4abaab16e6589f34dc3cbcba07e0ba6066346b9ba6ed"} Nov 30 06:48:58 crc kubenswrapper[4941]: I1130 06:48:58.985825 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzm96" event={"ID":"a44f18a4-b76a-48fa-b2e9-df18115674d3","Type":"ContainerStarted","Data":"138568fa5675ad5235be2cd91904159aca59671b617ce3e54128341b4e96a8e8"} Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.692208 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 30 06:48:59 crc kubenswrapper[4941]: E1130 06:48:59.695777 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="773d34d4-1723-42c4-8b83-43f629b630c2" containerName="collect-profiles" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.695801 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="773d34d4-1723-42c4-8b83-43f629b630c2" containerName="collect-profiles" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.695910 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="773d34d4-1723-42c4-8b83-43f629b630c2" containerName="collect-profiles" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.696482 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.700624 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.700711 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.700831 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.821059 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5a53242-f883-416d-9453-caa8ad88f08f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a5a53242-f883-416d-9453-caa8ad88f08f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.821170 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5a53242-f883-416d-9453-caa8ad88f08f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a5a53242-f883-416d-9453-caa8ad88f08f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.876179 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.880967 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-rwj5c" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.922205 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5a53242-f883-416d-9453-caa8ad88f08f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a5a53242-f883-416d-9453-caa8ad88f08f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.922293 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5a53242-f883-416d-9453-caa8ad88f08f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a5a53242-f883-416d-9453-caa8ad88f08f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.923648 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5a53242-f883-416d-9453-caa8ad88f08f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"a5a53242-f883-416d-9453-caa8ad88f08f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 30 06:48:59 crc kubenswrapper[4941]: I1130 06:48:59.980202 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5a53242-f883-416d-9453-caa8ad88f08f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"a5a53242-f883-416d-9453-caa8ad88f08f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.030448 4941 generic.go:334] "Generic (PLEG): container finished" podID="a44f18a4-b76a-48fa-b2e9-df18115674d3" 
containerID="d01abf5371fe6bacbd9b4abaab16e6589f34dc3cbcba07e0ba6066346b9ba6ed" exitCode=0 Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.030537 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzm96" event={"ID":"a44f18a4-b76a-48fa-b2e9-df18115674d3","Type":"ContainerDied","Data":"d01abf5371fe6bacbd9b4abaab16e6589f34dc3cbcba07e0ba6066346b9ba6ed"} Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.032702 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.046339 4941 generic.go:334] "Generic (PLEG): container finished" podID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" containerID="5fe8af0326c28ec19cf74b62819842c5dc131bf33485a9f110c2140cf94d4f2e" exitCode=0 Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.047208 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwk4x" event={"ID":"ea77dd7e-611a-4a66-8ae6-8f45472ea609","Type":"ContainerDied","Data":"5fe8af0326c28ec19cf74b62819842c5dc131bf33485a9f110c2140cf94d4f2e"} Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.047249 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwk4x" event={"ID":"ea77dd7e-611a-4a66-8ae6-8f45472ea609","Type":"ContainerStarted","Data":"b7ab1e42ca5bd15e4d75d96ca10706768105f0b8072c18e0bef6d8aefeeee9b2"} Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.474080 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.511783 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 30 06:49:00 crc kubenswrapper[4941]: W1130 06:49:00.570423 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poda5a53242_f883_416d_9453_caa8ad88f08f.slice/crio-d60caac7258d0aecfcd090a2088279f27822e4e50e9770930f74eea821ab43a2 WatchSource:0}: Error finding container d60caac7258d0aecfcd090a2088279f27822e4e50e9770930f74eea821ab43a2: Status 404 returned error can't find the container with id d60caac7258d0aecfcd090a2088279f27822e4e50e9770930f74eea821ab43a2 Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.640102 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8d66150d-6ee6-469f-a9f6-6820b4cc6df3-kube-api-access\") pod \"8d66150d-6ee6-469f-a9f6-6820b4cc6df3\" (UID: \"8d66150d-6ee6-469f-a9f6-6820b4cc6df3\") " Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.640199 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8d66150d-6ee6-469f-a9f6-6820b4cc6df3-kubelet-dir\") pod \"8d66150d-6ee6-469f-a9f6-6820b4cc6df3\" (UID: \"8d66150d-6ee6-469f-a9f6-6820b4cc6df3\") " Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.640499 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8d66150d-6ee6-469f-a9f6-6820b4cc6df3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8d66150d-6ee6-469f-a9f6-6820b4cc6df3" (UID: "8d66150d-6ee6-469f-a9f6-6820b4cc6df3"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.647267 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d66150d-6ee6-469f-a9f6-6820b4cc6df3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8d66150d-6ee6-469f-a9f6-6820b4cc6df3" (UID: "8d66150d-6ee6-469f-a9f6-6820b4cc6df3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.741238 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8d66150d-6ee6-469f-a9f6-6820b4cc6df3-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 30 06:49:00 crc kubenswrapper[4941]: I1130 06:49:00.741269 4941 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8d66150d-6ee6-469f-a9f6-6820b4cc6df3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 30 06:49:01 crc kubenswrapper[4941]: I1130 06:49:01.084114 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a5a53242-f883-416d-9453-caa8ad88f08f","Type":"ContainerStarted","Data":"d60caac7258d0aecfcd090a2088279f27822e4e50e9770930f74eea821ab43a2"} Nov 30 06:49:01 crc kubenswrapper[4941]: I1130 06:49:01.086745 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8d66150d-6ee6-469f-a9f6-6820b4cc6df3","Type":"ContainerDied","Data":"5086f65d18d86b22171151455f9ef11fa2fdfefcf0d47a51a69c2ad9973fb781"} Nov 30 06:49:01 crc kubenswrapper[4941]: I1130 06:49:01.086795 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5086f65d18d86b22171151455f9ef11fa2fdfefcf0d47a51a69c2ad9973fb781" Nov 30 06:49:01 crc kubenswrapper[4941]: I1130 06:49:01.086808 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 30 06:49:02 crc kubenswrapper[4941]: I1130 06:49:02.111733 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a5a53242-f883-416d-9453-caa8ad88f08f","Type":"ContainerStarted","Data":"13c949bfca530c93ad8fb2b829ad427a6d61611166ac8712e48e4030a4e7d4b9"} Nov 30 06:49:02 crc kubenswrapper[4941]: I1130 06:49:02.131890 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.131865737 podStartE2EDuration="3.131865737s" podCreationTimestamp="2025-11-30 06:48:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:49:02.127359606 +0000 UTC m=+162.895531225" watchObservedRunningTime="2025-11-30 06:49:02.131865737 +0000 UTC m=+162.900037346" Nov 30 06:49:02 crc kubenswrapper[4941]: I1130 06:49:02.514155 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-st8vq" Nov 30 06:49:02 crc kubenswrapper[4941]: I1130 06:49:02.978910 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 06:49:02 crc kubenswrapper[4941]: I1130 06:49:02.979023 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 06:49:03 crc kubenswrapper[4941]: I1130 06:49:03.135652 4941 generic.go:334] "Generic (PLEG): container finished" podID="a5a53242-f883-416d-9453-caa8ad88f08f" containerID="13c949bfca530c93ad8fb2b829ad427a6d61611166ac8712e48e4030a4e7d4b9" exitCode=0 Nov 30 06:49:03 crc kubenswrapper[4941]: I1130 06:49:03.135729 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a5a53242-f883-416d-9453-caa8ad88f08f","Type":"ContainerDied","Data":"13c949bfca530c93ad8fb2b829ad427a6d61611166ac8712e48e4030a4e7d4b9"} Nov 30 06:49:04 crc kubenswrapper[4941]: I1130 06:49:04.241416 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:49:04 crc kubenswrapper[4941]: I1130 06:49:04.248858 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ba34d142-c6e9-45bd-93a4-cf8e15558381-metrics-certs\") pod \"network-metrics-daemon-vwfsk\" (UID: \"ba34d142-c6e9-45bd-93a4-cf8e15558381\") " pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:49:04 crc kubenswrapper[4941]: I1130 06:49:04.461296 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-vwfsk" Nov 30 06:49:04 crc kubenswrapper[4941]: I1130 06:49:04.506907 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 30 06:49:04 crc kubenswrapper[4941]: I1130 06:49:04.545157 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5a53242-f883-416d-9453-caa8ad88f08f-kube-api-access\") pod \"a5a53242-f883-416d-9453-caa8ad88f08f\" (UID: \"a5a53242-f883-416d-9453-caa8ad88f08f\") " Nov 30 06:49:04 crc kubenswrapper[4941]: I1130 06:49:04.545721 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5a53242-f883-416d-9453-caa8ad88f08f-kubelet-dir\") pod \"a5a53242-f883-416d-9453-caa8ad88f08f\" (UID: \"a5a53242-f883-416d-9453-caa8ad88f08f\") " Nov 30 06:49:04 crc kubenswrapper[4941]: I1130 06:49:04.545839 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a5a53242-f883-416d-9453-caa8ad88f08f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a5a53242-f883-416d-9453-caa8ad88f08f" (UID: "a5a53242-f883-416d-9453-caa8ad88f08f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:49:04 crc kubenswrapper[4941]: I1130 06:49:04.546190 4941 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a5a53242-f883-416d-9453-caa8ad88f08f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 30 06:49:04 crc kubenswrapper[4941]: I1130 06:49:04.549177 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5a53242-f883-416d-9453-caa8ad88f08f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a5a53242-f883-416d-9453-caa8ad88f08f" (UID: "a5a53242-f883-416d-9453-caa8ad88f08f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:49:04 crc kubenswrapper[4941]: I1130 06:49:04.649262 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a5a53242-f883-416d-9453-caa8ad88f08f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 30 06:49:04 crc kubenswrapper[4941]: I1130 06:49:04.840184 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-vwfsk"] Nov 30 06:49:04 crc kubenswrapper[4941]: W1130 06:49:04.869539 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podba34d142_c6e9_45bd_93a4_cf8e15558381.slice/crio-3a2a5d3ecd5d28aa3c9c2e3c6f4aa1fd6db7e117622ab9067d5bfd6d62e92dc2 WatchSource:0}: Error finding container 3a2a5d3ecd5d28aa3c9c2e3c6f4aa1fd6db7e117622ab9067d5bfd6d62e92dc2: Status 404 returned error can't find the container with id 3a2a5d3ecd5d28aa3c9c2e3c6f4aa1fd6db7e117622ab9067d5bfd6d62e92dc2 Nov 30 06:49:05 crc kubenswrapper[4941]: I1130 06:49:05.173709 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" event={"ID":"ba34d142-c6e9-45bd-93a4-cf8e15558381","Type":"ContainerStarted","Data":"3a2a5d3ecd5d28aa3c9c2e3c6f4aa1fd6db7e117622ab9067d5bfd6d62e92dc2"} Nov 30 06:49:05 crc kubenswrapper[4941]: I1130 06:49:05.180309 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"a5a53242-f883-416d-9453-caa8ad88f08f","Type":"ContainerDied","Data":"d60caac7258d0aecfcd090a2088279f27822e4e50e9770930f74eea821ab43a2"} Nov 30 06:49:05 crc kubenswrapper[4941]: I1130 06:49:05.180374 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 30 06:49:05 crc kubenswrapper[4941]: I1130 06:49:05.180376 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d60caac7258d0aecfcd090a2088279f27822e4e50e9770930f74eea821ab43a2" Nov 30 06:49:07 crc kubenswrapper[4941]: I1130 06:49:07.242206 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-css84" Nov 30 06:49:07 crc kubenswrapper[4941]: I1130 06:49:07.243424 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-vkq5m" Nov 30 06:49:07 crc kubenswrapper[4941]: I1130 06:49:07.246832 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-css84" Nov 30 06:49:11 crc kubenswrapper[4941]: I1130 06:49:11.221184 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" event={"ID":"ba34d142-c6e9-45bd-93a4-cf8e15558381","Type":"ContainerStarted","Data":"f253ef8e735e48eb5036d2e0f1613b63791646142ae8b4f167fb4ee9c0a61da8"} Nov 30 06:49:14 crc kubenswrapper[4941]: I1130 06:49:14.395553 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:49:23 crc kubenswrapper[4941]: E1130 06:49:23.594570 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 30 06:49:23 crc kubenswrapper[4941]: E1130 06:49:23.595503 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4bvvc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-hhb7m_openshift-marketplace(fe26a6e1-683c-4afa-8e12-454e55a8e950): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: 
context canceled" logger="UnhandledError" Nov 30 06:49:23 crc kubenswrapper[4941]: E1130 06:49:23.596747 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-hhb7m" podUID="fe26a6e1-683c-4afa-8e12-454e55a8e950" Nov 30 06:49:25 crc kubenswrapper[4941]: E1130 06:49:25.193451 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-hhb7m" podUID="fe26a6e1-683c-4afa-8e12-454e55a8e950" Nov 30 06:49:25 crc kubenswrapper[4941]: E1130 06:49:25.267994 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 30 06:49:25 crc kubenswrapper[4941]: E1130 06:49:25.268370 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9zmsw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-9lzhs_openshift-marketplace(7647c27f-b440-4d1c-8e6f-ffa56aa08a3f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 30 06:49:25 crc kubenswrapper[4941]: E1130 06:49:25.269576 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-9lzhs" podUID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" Nov 30 06:49:26 crc kubenswrapper[4941]: I1130 06:49:26.644733 4941 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 30 06:49:27 crc kubenswrapper[4941]: I1130 06:49:27.775303 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-dgbsb" Nov 30 06:49:28 crc kubenswrapper[4941]: E1130 06:49:28.444282 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-9lzhs" podUID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" Nov 30 06:49:28 crc kubenswrapper[4941]: E1130 06:49:28.543574 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 30 06:49:28 crc kubenswrapper[4941]: E1130 06:49:28.544141 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6hvvf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-dzm96_openshift-marketplace(a44f18a4-b76a-48fa-b2e9-df18115674d3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 30 06:49:28 crc kubenswrapper[4941]: E1130 06:49:28.545959 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-dzm96" podUID="a44f18a4-b76a-48fa-b2e9-df18115674d3" Nov 30 06:49:28 crc kubenswrapper[4941]: E1130 06:49:28.607342 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: 
context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Nov 30 06:49:28 crc kubenswrapper[4941]: E1130 06:49:28.607473 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n5nsw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-sx2h8_openshift-marketplace(4348bd9e-d48d-41a7-9cbc-14620a7a8aa5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 30 06:49:28 crc kubenswrapper[4941]: E1130 06:49:28.608761 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-sx2h8" podUID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" Nov 30 06:49:28 crc kubenswrapper[4941]: E1130 06:49:28.646810 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 30 06:49:28 crc kubenswrapper[4941]: E1130 06:49:28.646979 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dsmtv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-nwk4x_openshift-marketplace(ea77dd7e-611a-4a66-8ae6-8f45472ea609): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 30 06:49:28 crc kubenswrapper[4941]: E1130 06:49:28.648507 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-nwk4x" podUID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" Nov 30 06:49:29 crc kubenswrapper[4941]: I1130 06:49:29.348609 4941 generic.go:334] "Generic (PLEG): container finished" podID="295ff105-17f2-40dc-96ab-8cc8b03031af" containerID="a5c21cf456e6e7f08dbd41abfcedcde72124061b1b1f53702b1f5227fc62972c" exitCode=0 Nov 30 06:49:29 crc kubenswrapper[4941]: I1130 06:49:29.348682 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5dr7" event={"ID":"295ff105-17f2-40dc-96ab-8cc8b03031af","Type":"ContainerDied","Data":"a5c21cf456e6e7f08dbd41abfcedcde72124061b1b1f53702b1f5227fc62972c"} Nov 30 06:49:29 crc kubenswrapper[4941]: I1130 06:49:29.350975 4941 generic.go:334] "Generic (PLEG): container finished" podID="a60b8750-10a6-40dc-8aea-d3f3f95a6597" containerID="d78e9d512d8b7dac0b0260d11bee113178662bfa8a2d0a1a042fbb34d5fa594b" exitCode=0 Nov 30 06:49:29 crc kubenswrapper[4941]: I1130 06:49:29.351030 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zsjd4" event={"ID":"a60b8750-10a6-40dc-8aea-d3f3f95a6597","Type":"ContainerDied","Data":"d78e9d512d8b7dac0b0260d11bee113178662bfa8a2d0a1a042fbb34d5fa594b"} Nov 30 06:49:29 crc kubenswrapper[4941]: I1130 06:49:29.366269 4941 generic.go:334] "Generic (PLEG): container finished" podID="596db448-f0db-4e85-bb61-465962d12a40" containerID="005f284dc2f12edfd55f909e47204fde59cf0156bc4fac6ebf6458e98a9a173d" exitCode=0 Nov 30 06:49:29 crc kubenswrapper[4941]: I1130 06:49:29.366411 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7rpb" 
event={"ID":"596db448-f0db-4e85-bb61-465962d12a40","Type":"ContainerDied","Data":"005f284dc2f12edfd55f909e47204fde59cf0156bc4fac6ebf6458e98a9a173d"} Nov 30 06:49:29 crc kubenswrapper[4941]: I1130 06:49:29.371423 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-vwfsk" event={"ID":"ba34d142-c6e9-45bd-93a4-cf8e15558381","Type":"ContainerStarted","Data":"6689922702af7cdd0fffef92f9a342a60d1f803b9e7437731e9c97fecc60548d"} Nov 30 06:49:29 crc kubenswrapper[4941]: E1130 06:49:29.373590 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-nwk4x" podUID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" Nov 30 06:49:29 crc kubenswrapper[4941]: E1130 06:49:29.375712 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-dzm96" podUID="a44f18a4-b76a-48fa-b2e9-df18115674d3" Nov 30 06:49:29 crc kubenswrapper[4941]: E1130 06:49:29.379164 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-sx2h8" podUID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" Nov 30 06:49:29 crc kubenswrapper[4941]: I1130 06:49:29.459144 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-vwfsk" podStartSLOduration=168.459121574 podStartE2EDuration="2m48.459121574s" podCreationTimestamp="2025-11-30 06:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:49:29.453178518 +0000 UTC m=+190.221350167" watchObservedRunningTime="2025-11-30 06:49:29.459121574 +0000 UTC m=+190.227293183" Nov 30 06:49:30 crc kubenswrapper[4941]: I1130 06:49:30.379640 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7rpb" event={"ID":"596db448-f0db-4e85-bb61-465962d12a40","Type":"ContainerStarted","Data":"4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30"} Nov 30 06:49:30 crc kubenswrapper[4941]: I1130 06:49:30.382746 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5dr7" event={"ID":"295ff105-17f2-40dc-96ab-8cc8b03031af","Type":"ContainerStarted","Data":"e200e68ea283cf309f8e679e749c6679f254b837656fc124799d8ac6d43377dc"} Nov 30 06:49:30 crc kubenswrapper[4941]: I1130 06:49:30.385474 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zsjd4" event={"ID":"a60b8750-10a6-40dc-8aea-d3f3f95a6597","Type":"ContainerStarted","Data":"7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860"} Nov 30 06:49:30 crc kubenswrapper[4941]: I1130 06:49:30.407118 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-j7rpb" podStartSLOduration=2.501699412 podStartE2EDuration="35.407086232s" podCreationTimestamp="2025-11-30 06:48:55 +0000 UTC" firstStartedPulling="2025-11-30 06:48:56.853281106 
+0000 UTC m=+157.621452715" lastFinishedPulling="2025-11-30 06:49:29.758667926 +0000 UTC m=+190.526839535" observedRunningTime="2025-11-30 06:49:30.404405228 +0000 UTC m=+191.172576837" watchObservedRunningTime="2025-11-30 06:49:30.407086232 +0000 UTC m=+191.175257861"
Nov 30 06:49:30 crc kubenswrapper[4941]: I1130 06:49:30.420377 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h5dr7" podStartSLOduration=2.395789717 podStartE2EDuration="36.420360716s" podCreationTimestamp="2025-11-30 06:48:54 +0000 UTC" firstStartedPulling="2025-11-30 06:48:55.79116139 +0000 UTC m=+156.559332999" lastFinishedPulling="2025-11-30 06:49:29.815732389 +0000 UTC m=+190.583903998" observedRunningTime="2025-11-30 06:49:30.418685164 +0000 UTC m=+191.186856783" watchObservedRunningTime="2025-11-30 06:49:30.420360716 +0000 UTC m=+191.188532335"
Nov 30 06:49:30 crc kubenswrapper[4941]: I1130 06:49:30.442007 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zsjd4" podStartSLOduration=2.362616801 podStartE2EDuration="36.441975613s" podCreationTimestamp="2025-11-30 06:48:54 +0000 UTC" firstStartedPulling="2025-11-30 06:48:55.793847234 +0000 UTC m=+156.562018843" lastFinishedPulling="2025-11-30 06:49:29.873206046 +0000 UTC m=+190.641377655" observedRunningTime="2025-11-30 06:49:30.438681009 +0000 UTC m=+191.206852618" watchObservedRunningTime="2025-11-30 06:49:30.441975613 +0000 UTC m=+191.210147222"
Nov 30 06:49:32 crc kubenswrapper[4941]: I1130 06:49:32.979079 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 06:49:32 crc kubenswrapper[4941]: I1130 06:49:32.979487 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 06:49:34 crc kubenswrapper[4941]: I1130 06:49:34.884884 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Nov 30 06:49:34 crc kubenswrapper[4941]: E1130 06:49:34.885663 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d66150d-6ee6-469f-a9f6-6820b4cc6df3" containerName="pruner"
Nov 30 06:49:34 crc kubenswrapper[4941]: I1130 06:49:34.885678 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d66150d-6ee6-469f-a9f6-6820b4cc6df3" containerName="pruner"
Nov 30 06:49:34 crc kubenswrapper[4941]: E1130 06:49:34.885701 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5a53242-f883-416d-9453-caa8ad88f08f" containerName="pruner"
Nov 30 06:49:34 crc kubenswrapper[4941]: I1130 06:49:34.885708 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5a53242-f883-416d-9453-caa8ad88f08f" containerName="pruner"
Nov 30 06:49:34 crc kubenswrapper[4941]: I1130 06:49:34.885807 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d66150d-6ee6-469f-a9f6-6820b4cc6df3" containerName="pruner"
Nov 30 06:49:34 crc kubenswrapper[4941]: I1130 06:49:34.885819 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5a53242-f883-416d-9453-caa8ad88f08f" containerName="pruner"
Nov 30 06:49:34 crc kubenswrapper[4941]: I1130 06:49:34.886312 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 30 06:49:34 crc kubenswrapper[4941]: I1130 06:49:34.888403 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Nov 30 06:49:34 crc kubenswrapper[4941]: I1130 06:49:34.888728 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Nov 30 06:49:34 crc kubenswrapper[4941]: I1130 06:49:34.897552 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.016403 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8108670f-9dd4-44df-b1fb-b27dd4a78d62-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8108670f-9dd4-44df-b1fb-b27dd4a78d62\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.016588 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8108670f-9dd4-44df-b1fb-b27dd4a78d62-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8108670f-9dd4-44df-b1fb-b27dd4a78d62\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.118713 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8108670f-9dd4-44df-b1fb-b27dd4a78d62-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8108670f-9dd4-44df-b1fb-b27dd4a78d62\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.118811 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8108670f-9dd4-44df-b1fb-b27dd4a78d62-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8108670f-9dd4-44df-b1fb-b27dd4a78d62\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.118904 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8108670f-9dd4-44df-b1fb-b27dd4a78d62-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8108670f-9dd4-44df-b1fb-b27dd4a78d62\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.123928 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h5dr7"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.124044 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h5dr7"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.138515 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8108670f-9dd4-44df-b1fb-b27dd4a78d62-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8108670f-9dd4-44df-b1fb-b27dd4a78d62\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.201000 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h5dr7"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.214590 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zsjd4"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.214760 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zsjd4"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.222745 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.256528 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zsjd4"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.455521 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h5dr7"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.460198 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zsjd4"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.499783 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-j7rpb"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.499834 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-j7rpb"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.542645 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-j7rpb"
Nov 30 06:49:35 crc kubenswrapper[4941]: I1130 06:49:35.681517 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Nov 30 06:49:36 crc kubenswrapper[4941]: I1130 06:49:36.420658 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8108670f-9dd4-44df-b1fb-b27dd4a78d62","Type":"ContainerStarted","Data":"95fa68e5e25600a4663296c53550431c25e9f071626c34fc154d6a3309bc2a88"}
Nov 30 06:49:36 crc kubenswrapper[4941]: I1130 06:49:36.421153 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8108670f-9dd4-44df-b1fb-b27dd4a78d62","Type":"ContainerStarted","Data":"b1751ffdd07ccd052e72f9b3607bd29b05217033957765664d2e4d1fc263b0e5"}
Nov 30 06:49:36 crc kubenswrapper[4941]: I1130 06:49:36.441460 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.441412783 podStartE2EDuration="2.441412783s" podCreationTimestamp="2025-11-30 06:49:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:49:36.439905872 +0000 UTC m=+197.208077491" watchObservedRunningTime="2025-11-30 06:49:36.441412783 +0000 UTC m=+197.209584382"
Nov 30 06:49:36 crc kubenswrapper[4941]: I1130 06:49:36.475319 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-j7rpb"
Nov 30 06:49:37 crc kubenswrapper[4941]: I1130 06:49:37.429085 4941 generic.go:334] "Generic (PLEG): container finished" podID="8108670f-9dd4-44df-b1fb-b27dd4a78d62" containerID="95fa68e5e25600a4663296c53550431c25e9f071626c34fc154d6a3309bc2a88" exitCode=0
Nov 30 06:49:37 crc kubenswrapper[4941]: I1130 06:49:37.430965 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8108670f-9dd4-44df-b1fb-b27dd4a78d62","Type":"ContainerDied","Data":"95fa68e5e25600a4663296c53550431c25e9f071626c34fc154d6a3309bc2a88"}
Nov 30 06:49:37 crc kubenswrapper[4941]: I1130 06:49:37.563014 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zsjd4"]
Nov 30 06:49:37 crc kubenswrapper[4941]: I1130 06:49:37.755125 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j7rpb"]
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.436176 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hhb7m" event={"ID":"fe26a6e1-683c-4afa-8e12-454e55a8e950","Type":"ContainerStarted","Data":"fd27bc67ee98bd0c07e931457cfaabd4c8532938e9fdde0618646a54862c6254"}
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.436314 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zsjd4" podUID="a60b8750-10a6-40dc-8aea-d3f3f95a6597" containerName="registry-server" containerID="cri-o://7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860" gracePeriod=2
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.436525 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-j7rpb" podUID="596db448-f0db-4e85-bb61-465962d12a40" containerName="registry-server" containerID="cri-o://4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30" gracePeriod=2
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.718636 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.804204 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j7rpb"
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.810432 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zsjd4"
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.895870 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8108670f-9dd4-44df-b1fb-b27dd4a78d62-kube-api-access\") pod \"8108670f-9dd4-44df-b1fb-b27dd4a78d62\" (UID: \"8108670f-9dd4-44df-b1fb-b27dd4a78d62\") "
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.896399 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gct7q\" (UniqueName: \"kubernetes.io/projected/a60b8750-10a6-40dc-8aea-d3f3f95a6597-kube-api-access-gct7q\") pod \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\" (UID: \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\") "
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.896540 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8108670f-9dd4-44df-b1fb-b27dd4a78d62-kubelet-dir\") pod \"8108670f-9dd4-44df-b1fb-b27dd4a78d62\" (UID: \"8108670f-9dd4-44df-b1fb-b27dd4a78d62\") "
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.896628 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596db448-f0db-4e85-bb61-465962d12a40-catalog-content\") pod \"596db448-f0db-4e85-bb61-465962d12a40\" (UID: \"596db448-f0db-4e85-bb61-465962d12a40\") "
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.897254 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8108670f-9dd4-44df-b1fb-b27dd4a78d62-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8108670f-9dd4-44df-b1fb-b27dd4a78d62" (UID: "8108670f-9dd4-44df-b1fb-b27dd4a78d62"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.901903 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a60b8750-10a6-40dc-8aea-d3f3f95a6597-kube-api-access-gct7q" (OuterVolumeSpecName: "kube-api-access-gct7q") pod "a60b8750-10a6-40dc-8aea-d3f3f95a6597" (UID: "a60b8750-10a6-40dc-8aea-d3f3f95a6597"). InnerVolumeSpecName "kube-api-access-gct7q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.904977 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8108670f-9dd4-44df-b1fb-b27dd4a78d62-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8108670f-9dd4-44df-b1fb-b27dd4a78d62" (UID: "8108670f-9dd4-44df-b1fb-b27dd4a78d62"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.969900 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/596db448-f0db-4e85-bb61-465962d12a40-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "596db448-f0db-4e85-bb61-465962d12a40" (UID: "596db448-f0db-4e85-bb61-465962d12a40"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.997991 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a60b8750-10a6-40dc-8aea-d3f3f95a6597-catalog-content\") pod \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\" (UID: \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\") "
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.998044 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596db448-f0db-4e85-bb61-465962d12a40-utilities\") pod \"596db448-f0db-4e85-bb61-465962d12a40\" (UID: \"596db448-f0db-4e85-bb61-465962d12a40\") "
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.998074 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a60b8750-10a6-40dc-8aea-d3f3f95a6597-utilities\") pod \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\" (UID: \"a60b8750-10a6-40dc-8aea-d3f3f95a6597\") "
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.998116 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9s6j\" (UniqueName: \"kubernetes.io/projected/596db448-f0db-4e85-bb61-465962d12a40-kube-api-access-x9s6j\") pod \"596db448-f0db-4e85-bb61-465962d12a40\" (UID: \"596db448-f0db-4e85-bb61-465962d12a40\") "
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.998278 4941 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8108670f-9dd4-44df-b1fb-b27dd4a78d62-kubelet-dir\") on node \"crc\" DevicePath \"\""
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.998292 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/596db448-f0db-4e85-bb61-465962d12a40-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.998303 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8108670f-9dd4-44df-b1fb-b27dd4a78d62-kube-api-access\") on node \"crc\" DevicePath \"\""
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.998312 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gct7q\" (UniqueName: \"kubernetes.io/projected/a60b8750-10a6-40dc-8aea-d3f3f95a6597-kube-api-access-gct7q\") on node \"crc\" DevicePath \"\""
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.999043 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a60b8750-10a6-40dc-8aea-d3f3f95a6597-utilities" (OuterVolumeSpecName: "utilities") pod "a60b8750-10a6-40dc-8aea-d3f3f95a6597" (UID: "a60b8750-10a6-40dc-8aea-d3f3f95a6597"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 06:49:38 crc kubenswrapper[4941]: I1130 06:49:38.999175 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/596db448-f0db-4e85-bb61-465962d12a40-utilities" (OuterVolumeSpecName: "utilities") pod "596db448-f0db-4e85-bb61-465962d12a40" (UID: "596db448-f0db-4e85-bb61-465962d12a40"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.000971 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/596db448-f0db-4e85-bb61-465962d12a40-kube-api-access-x9s6j" (OuterVolumeSpecName: "kube-api-access-x9s6j") pod "596db448-f0db-4e85-bb61-465962d12a40" (UID: "596db448-f0db-4e85-bb61-465962d12a40"). InnerVolumeSpecName "kube-api-access-x9s6j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.047120 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a60b8750-10a6-40dc-8aea-d3f3f95a6597-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a60b8750-10a6-40dc-8aea-d3f3f95a6597" (UID: "a60b8750-10a6-40dc-8aea-d3f3f95a6597"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.099369 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a60b8750-10a6-40dc-8aea-d3f3f95a6597-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.099417 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9s6j\" (UniqueName: \"kubernetes.io/projected/596db448-f0db-4e85-bb61-465962d12a40-kube-api-access-x9s6j\") on node \"crc\" DevicePath \"\""
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.099433 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a60b8750-10a6-40dc-8aea-d3f3f95a6597-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.099446 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/596db448-f0db-4e85-bb61-465962d12a40-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.447350 4941 generic.go:334] "Generic (PLEG): container finished" podID="a60b8750-10a6-40dc-8aea-d3f3f95a6597" containerID="7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860" exitCode=0
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.447500 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zsjd4" event={"ID":"a60b8750-10a6-40dc-8aea-d3f3f95a6597","Type":"ContainerDied","Data":"7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860"}
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.447531 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zsjd4"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.447562 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zsjd4" event={"ID":"a60b8750-10a6-40dc-8aea-d3f3f95a6597","Type":"ContainerDied","Data":"7b3bbcd895ba6eac74e8359497dc383518063aa9ee812ae386f34ed854bf7bd4"}
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.447594 4941 scope.go:117] "RemoveContainer" containerID="7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.453235 4941 generic.go:334] "Generic (PLEG): container finished" podID="596db448-f0db-4e85-bb61-465962d12a40" containerID="4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30" exitCode=0
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.453303 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7rpb" event={"ID":"596db448-f0db-4e85-bb61-465962d12a40","Type":"ContainerDied","Data":"4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30"}
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.453361 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j7rpb" event={"ID":"596db448-f0db-4e85-bb61-465962d12a40","Type":"ContainerDied","Data":"1cc232e82659ceef4b4b7eb5d34a71544d761fa5803f33a33d3eb9d6bc6e1f85"}
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.453638 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j7rpb"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.455086 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8108670f-9dd4-44df-b1fb-b27dd4a78d62","Type":"ContainerDied","Data":"b1751ffdd07ccd052e72f9b3607bd29b05217033957765664d2e4d1fc263b0e5"}
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.455112 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1751ffdd07ccd052e72f9b3607bd29b05217033957765664d2e4d1fc263b0e5"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.455116 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.457475 4941 generic.go:334] "Generic (PLEG): container finished" podID="fe26a6e1-683c-4afa-8e12-454e55a8e950" containerID="fd27bc67ee98bd0c07e931457cfaabd4c8532938e9fdde0618646a54862c6254" exitCode=0
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.457503 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hhb7m" event={"ID":"fe26a6e1-683c-4afa-8e12-454e55a8e950","Type":"ContainerDied","Data":"fd27bc67ee98bd0c07e931457cfaabd4c8532938e9fdde0618646a54862c6254"}
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.481389 4941 scope.go:117] "RemoveContainer" containerID="d78e9d512d8b7dac0b0260d11bee113178662bfa8a2d0a1a042fbb34d5fa594b"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.502700 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zsjd4"]
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.506316 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zsjd4"]
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.537532 4941 scope.go:117] "RemoveContainer" containerID="9d7697434fc4dde2e1259414ced777a5a1b0e4f26597fddf0823b533402d584e"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.537655 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a60b8750-10a6-40dc-8aea-d3f3f95a6597" path="/var/lib/kubelet/pods/a60b8750-10a6-40dc-8aea-d3f3f95a6597/volumes"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.538673 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j7rpb"]
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.538720 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-j7rpb"]
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.560597 4941 scope.go:117] "RemoveContainer" containerID="7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860"
Nov 30 06:49:39 crc kubenswrapper[4941]: E1130 06:49:39.561217 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860\": container with ID starting with 7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860 not found: ID does not exist" containerID="7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.561257 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860"} err="failed to get container status \"7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860\": rpc error: code = NotFound desc = could not find container \"7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860\": container with ID starting with 7102c9987ecd3e8da7f45ba84ec650a3a3b23974a27579c2789df20c00bed860 not found: ID does not exist"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.561312 4941 scope.go:117] "RemoveContainer" containerID="d78e9d512d8b7dac0b0260d11bee113178662bfa8a2d0a1a042fbb34d5fa594b"
Nov 30 06:49:39 crc kubenswrapper[4941]: E1130 06:49:39.561852 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d78e9d512d8b7dac0b0260d11bee113178662bfa8a2d0a1a042fbb34d5fa594b\": container with ID starting with d78e9d512d8b7dac0b0260d11bee113178662bfa8a2d0a1a042fbb34d5fa594b not found: ID does not exist" containerID="d78e9d512d8b7dac0b0260d11bee113178662bfa8a2d0a1a042fbb34d5fa594b"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.561920 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d78e9d512d8b7dac0b0260d11bee113178662bfa8a2d0a1a042fbb34d5fa594b"} err="failed to get container status \"d78e9d512d8b7dac0b0260d11bee113178662bfa8a2d0a1a042fbb34d5fa594b\": rpc error: code = NotFound desc = could not find container \"d78e9d512d8b7dac0b0260d11bee113178662bfa8a2d0a1a042fbb34d5fa594b\": container with ID starting with d78e9d512d8b7dac0b0260d11bee113178662bfa8a2d0a1a042fbb34d5fa594b not found: ID does not exist"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.561982 4941 scope.go:117] "RemoveContainer" containerID="9d7697434fc4dde2e1259414ced777a5a1b0e4f26597fddf0823b533402d584e"
Nov 30 06:49:39 crc kubenswrapper[4941]: E1130 06:49:39.562497 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d7697434fc4dde2e1259414ced777a5a1b0e4f26597fddf0823b533402d584e\": container with ID starting with 9d7697434fc4dde2e1259414ced777a5a1b0e4f26597fddf0823b533402d584e not found: ID does not exist" containerID="9d7697434fc4dde2e1259414ced777a5a1b0e4f26597fddf0823b533402d584e"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.562529 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d7697434fc4dde2e1259414ced777a5a1b0e4f26597fddf0823b533402d584e"} err="failed to get container status \"9d7697434fc4dde2e1259414ced777a5a1b0e4f26597fddf0823b533402d584e\": rpc error: code = NotFound desc = could not find container \"9d7697434fc4dde2e1259414ced777a5a1b0e4f26597fddf0823b533402d584e\": container with ID starting with 9d7697434fc4dde2e1259414ced777a5a1b0e4f26597fddf0823b533402d584e not found: ID does not exist"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.562552 4941 scope.go:117] "RemoveContainer" containerID="4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.579464 4941 scope.go:117] "RemoveContainer" containerID="005f284dc2f12edfd55f909e47204fde59cf0156bc4fac6ebf6458e98a9a173d"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.596068 4941 scope.go:117] "RemoveContainer" containerID="b6a1ed38b52fe412cf7743a82b3e8f7f5ad6c73e6e2ab121fe5fcbd7cf24e879"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.613359 4941 scope.go:117] "RemoveContainer" containerID="4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30"
Nov 30 06:49:39 crc kubenswrapper[4941]: E1130 06:49:39.613980 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30\": container with ID starting with 4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30 not found: ID does not exist" containerID="4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.614036 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30"} err="failed to get container status \"4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30\": rpc error: code = NotFound desc = could not find container \"4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30\": container with ID starting with 4d7f8adb5587e943850fbaa3a5b42a8023315b4abbd5bf811581824ee2752c30 not found: ID does not exist"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.614066 4941 scope.go:117] "RemoveContainer" containerID="005f284dc2f12edfd55f909e47204fde59cf0156bc4fac6ebf6458e98a9a173d"
Nov 30 06:49:39 crc kubenswrapper[4941]: E1130 06:49:39.614905 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"005f284dc2f12edfd55f909e47204fde59cf0156bc4fac6ebf6458e98a9a173d\": container with ID starting with 005f284dc2f12edfd55f909e47204fde59cf0156bc4fac6ebf6458e98a9a173d not found: ID does not exist" containerID="005f284dc2f12edfd55f909e47204fde59cf0156bc4fac6ebf6458e98a9a173d"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.614943 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"005f284dc2f12edfd55f909e47204fde59cf0156bc4fac6ebf6458e98a9a173d"} err="failed to get container status \"005f284dc2f12edfd55f909e47204fde59cf0156bc4fac6ebf6458e98a9a173d\": rpc error: code = NotFound desc = could not find container \"005f284dc2f12edfd55f909e47204fde59cf0156bc4fac6ebf6458e98a9a173d\": container with ID starting with 005f284dc2f12edfd55f909e47204fde59cf0156bc4fac6ebf6458e98a9a173d not found: ID does not exist"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.614978 4941 scope.go:117] "RemoveContainer" containerID="b6a1ed38b52fe412cf7743a82b3e8f7f5ad6c73e6e2ab121fe5fcbd7cf24e879"
Nov 30 06:49:39 crc kubenswrapper[4941]: E1130 06:49:39.615478 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6a1ed38b52fe412cf7743a82b3e8f7f5ad6c73e6e2ab121fe5fcbd7cf24e879\": container with ID starting with b6a1ed38b52fe412cf7743a82b3e8f7f5ad6c73e6e2ab121fe5fcbd7cf24e879 not found: ID does not exist" containerID="b6a1ed38b52fe412cf7743a82b3e8f7f5ad6c73e6e2ab121fe5fcbd7cf24e879"
Nov 30 06:49:39 crc kubenswrapper[4941]: I1130 06:49:39.615533 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6a1ed38b52fe412cf7743a82b3e8f7f5ad6c73e6e2ab121fe5fcbd7cf24e879"} err="failed to get container status \"b6a1ed38b52fe412cf7743a82b3e8f7f5ad6c73e6e2ab121fe5fcbd7cf24e879\": rpc error: code = NotFound desc = could not find container \"b6a1ed38b52fe412cf7743a82b3e8f7f5ad6c73e6e2ab121fe5fcbd7cf24e879\": container with ID starting with b6a1ed38b52fe412cf7743a82b3e8f7f5ad6c73e6e2ab121fe5fcbd7cf24e879 not found: ID does not exist"
Nov 30 06:49:40 crc kubenswrapper[4941]: I1130 06:49:40.465100 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hhb7m" event={"ID":"fe26a6e1-683c-4afa-8e12-454e55a8e950","Type":"ContainerStarted","Data":"bfb5472b56146c15184c3111142a8814d7c20368f48c3117d79bd9d4daa0e9f6"}
Nov 30 06:49:40 crc kubenswrapper[4941]: I1130 06:49:40.485569 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hhb7m" podStartSLOduration=2.23467703 podStartE2EDuration="44.485545059s" podCreationTimestamp="2025-11-30 06:48:56 +0000 UTC" firstStartedPulling="2025-11-30 06:48:57.907920499 +0000 UTC m=+158.676092118" lastFinishedPulling="2025-11-30 06:49:40.158788538 +0000 UTC m=+200.926960147" observedRunningTime="2025-11-30 06:49:40.484736302 +0000 UTC m=+201.252907921" watchObservedRunningTime="2025-11-30 06:49:40.485545059 +0000 UTC m=+201.253716668"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.477672 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwk4x" event={"ID":"ea77dd7e-611a-4a66-8ae6-8f45472ea609","Type":"ContainerStarted","Data":"f530c531e5288f2689cff215e01e587137e113cf00a3e0d36561c4c9a158bd61"}
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.529999 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="596db448-f0db-4e85-bb61-465962d12a40" path="/var/lib/kubelet/pods/596db448-f0db-4e85-bb61-465962d12a40/volumes"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.893983 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Nov 30 06:49:41 crc kubenswrapper[4941]: E1130 06:49:41.894360 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596db448-f0db-4e85-bb61-465962d12a40" containerName="extract-content"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.894382 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="596db448-f0db-4e85-bb61-465962d12a40" containerName="extract-content"
Nov 30 06:49:41 crc kubenswrapper[4941]: E1130 06:49:41.894395 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a60b8750-10a6-40dc-8aea-d3f3f95a6597" containerName="registry-server"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.894425 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a60b8750-10a6-40dc-8aea-d3f3f95a6597" containerName="registry-server"
Nov 30 06:49:41 crc kubenswrapper[4941]: E1130 06:49:41.894437 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596db448-f0db-4e85-bb61-465962d12a40" containerName="extract-utilities"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.894444 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="596db448-f0db-4e85-bb61-465962d12a40" containerName="extract-utilities"
Nov 30 06:49:41 crc kubenswrapper[4941]: E1130 06:49:41.894455 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596db448-f0db-4e85-bb61-465962d12a40" containerName="registry-server"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.894463 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="596db448-f0db-4e85-bb61-465962d12a40" containerName="registry-server"
Nov 30 06:49:41 crc kubenswrapper[4941]: E1130 06:49:41.894475 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8108670f-9dd4-44df-b1fb-b27dd4a78d62" containerName="pruner"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.894483 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8108670f-9dd4-44df-b1fb-b27dd4a78d62" containerName="pruner"
Nov 30 06:49:41 crc kubenswrapper[4941]: E1130 06:49:41.894512 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a60b8750-10a6-40dc-8aea-d3f3f95a6597" containerName="extract-content"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.894518 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a60b8750-10a6-40dc-8aea-d3f3f95a6597" containerName="extract-content"
Nov 30 06:49:41 crc kubenswrapper[4941]: E1130 06:49:41.894528 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a60b8750-10a6-40dc-8aea-d3f3f95a6597" containerName="extract-utilities"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.894535 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a60b8750-10a6-40dc-8aea-d3f3f95a6597" containerName="extract-utilities"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.894681 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="596db448-f0db-4e85-bb61-465962d12a40" containerName="registry-server"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.894694 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a60b8750-10a6-40dc-8aea-d3f3f95a6597" containerName="registry-server"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.894706 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="8108670f-9dd4-44df-b1fb-b27dd4a78d62" containerName="pruner"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.895384 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.897481 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.898407 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Nov 30 06:49:41 crc kubenswrapper[4941]: I1130 06:49:41.919648 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.042955 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9266077-b4e5-4258-a860-02f87c13bb5c-kubelet-dir\") pod \"installer-9-crc\" (UID: \"c9266077-b4e5-4258-a860-02f87c13bb5c\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.043008 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9266077-b4e5-4258-a860-02f87c13bb5c-kube-api-access\") pod \"installer-9-crc\" (UID: \"c9266077-b4e5-4258-a860-02f87c13bb5c\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.043537 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/c9266077-b4e5-4258-a860-02f87c13bb5c-var-lock\") pod \"installer-9-crc\" (UID: \"c9266077-b4e5-4258-a860-02f87c13bb5c\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.144381 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/c9266077-b4e5-4258-a860-02f87c13bb5c-var-lock\") pod \"installer-9-crc\" (UID: \"c9266077-b4e5-4258-a860-02f87c13bb5c\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.144492 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9266077-b4e5-4258-a860-02f87c13bb5c-kubelet-dir\") pod \"installer-9-crc\" (UID: \"c9266077-b4e5-4258-a860-02f87c13bb5c\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.144519 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9266077-b4e5-4258-a860-02f87c13bb5c-kube-api-access\") pod \"installer-9-crc\" (UID: \"c9266077-b4e5-4258-a860-02f87c13bb5c\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.144568 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/c9266077-b4e5-4258-a860-02f87c13bb5c-var-lock\") pod \"installer-9-crc\" (UID: \"c9266077-b4e5-4258-a860-02f87c13bb5c\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.145415 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9266077-b4e5-4258-a860-02f87c13bb5c-kubelet-dir\") pod \"installer-9-crc\" (UID: \"c9266077-b4e5-4258-a860-02f87c13bb5c\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.164649 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9266077-b4e5-4258-a860-02f87c13bb5c-kube-api-access\") pod \"installer-9-crc\" (UID: \"c9266077-b4e5-4258-a860-02f87c13bb5c\") " pod="openshift-kube-apiserver/installer-9-crc"
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.250856 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.465899 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.489492 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"c9266077-b4e5-4258-a860-02f87c13bb5c","Type":"ContainerStarted","Data":"1252a6c2d439c82c8e3fac247099f9575adf0ab558a63361946c90dd7ae29508"}
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.493438 4941 generic.go:334] "Generic (PLEG): container finished" podID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" containerID="f530c531e5288f2689cff215e01e587137e113cf00a3e0d36561c4c9a158bd61" exitCode=0
Nov 30 06:49:42 crc kubenswrapper[4941]: I1130 06:49:42.493504 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwk4x" event={"ID":"ea77dd7e-611a-4a66-8ae6-8f45472ea609","Type":"ContainerDied","Data":"f530c531e5288f2689cff215e01e587137e113cf00a3e0d36561c4c9a158bd61"}
Nov 30 06:49:43 crc kubenswrapper[4941]: I1130 06:49:43.500243 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"c9266077-b4e5-4258-a860-02f87c13bb5c","Type":"ContainerStarted","Data":"84be2253e3bb3c830604626b484be0f1ac6a034e3c54f77ae54ebe26bbf2d49f"}
Nov 30 06:49:43 crc kubenswrapper[4941]: I1130 06:49:43.501964 4941 generic.go:334] "Generic (PLEG): container finished" podID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" containerID="68f052336cd4e1caeb5ab4a67f41e6997c0283d7f947d52cb912f68798d63725" exitCode=0
Nov 30 06:49:43 crc kubenswrapper[4941]: I1130 06:49:43.502007 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sx2h8" event={"ID":"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5","Type":"ContainerDied","Data":"68f052336cd4e1caeb5ab4a67f41e6997c0283d7f947d52cb912f68798d63725"}
Nov 30 06:49:43 crc kubenswrapper[4941]: I1130 06:49:43.520671 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.520650691 podStartE2EDuration="2.520650691s" podCreationTimestamp="2025-11-30 06:49:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:49:43.517939591 +0000 UTC m=+204.286111190" watchObservedRunningTime="2025-11-30 06:49:43.520650691 +0000 UTC m=+204.288822290"
Nov 30 06:49:46 crc kubenswrapper[4941]: I1130 06:49:46.522426 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwk4x" event={"ID":"ea77dd7e-611a-4a66-8ae6-8f45472ea609","Type":"ContainerStarted","Data":"cfa7fc77fd66abc4138ca8dab28c2e01c85fd80897ac41ae92999932b3656c56"}
Nov 30 06:49:47 crc kubenswrapper[4941]: I1130 06:49:47.227013 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hhb7m"
Nov 30 06:49:47 crc kubenswrapper[4941]: I1130 06:49:47.227062 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hhb7m"
Nov 30 06:49:47 crc kubenswrapper[4941]: I1130 06:49:47.274796 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hhb7m"
Nov 30 06:49:47 crc kubenswrapper[4941]: I1130 06:49:47.564604 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nwk4x" podStartSLOduration=3.625811895 podStartE2EDuration="49.56458687s" podCreationTimestamp="2025-11-30 06:48:58 +0000 UTC" firstStartedPulling="2025-11-30 06:49:00.050414731 +0000 UTC m=+160.818586340" lastFinishedPulling="2025-11-30 06:49:45.989189706 +0000 UTC m=+206.757361315" observedRunningTime="2025-11-30 06:49:47.56214293 +0000 UTC m=+208.330314549" watchObservedRunningTime="2025-11-30 06:49:47.56458687 +0000 UTC m=+208.332758479"
Nov 30 06:49:47 crc kubenswrapper[4941]: I1130 06:49:47.594058 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hhb7m"
Nov 30 06:49:48 crc kubenswrapper[4941]: I1130 06:49:48.419253 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nwk4x"
Nov 30 06:49:48 crc kubenswrapper[4941]: I1130 06:49:48.420262 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nwk4x"
Nov 30 06:49:48 crc kubenswrapper[4941]: I1130 06:49:48.541975 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sx2h8" event={"ID":"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5","Type":"ContainerStarted","Data":"87b80a5c3bd051ce58a229189fb4895666e23c1d08da9a9c04fc4eed7f7f546c"}
Nov 30 06:49:48 crc kubenswrapper[4941]: I1130 06:49:48.543521 4941 generic.go:334] "Generic (PLEG): container finished" podID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" containerID="73fe24fbd8f5e58dd2bbae8801b8af563f5821589e505fedda35047d9321b609" exitCode=0
Nov 30 06:49:48 crc kubenswrapper[4941]: I1130 06:49:48.543593 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9lzhs" event={"ID":"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f","Type":"ContainerDied","Data":"73fe24fbd8f5e58dd2bbae8801b8af563f5821589e505fedda35047d9321b609"}
Nov 30 06:49:48 crc kubenswrapper[4941]: I1130 06:49:48.547407 4941 generic.go:334] "Generic (PLEG): container finished" podID="a44f18a4-b76a-48fa-b2e9-df18115674d3" containerID="f0d479c8a44b939d4fd3da93eb63a389352fe3de40c96e36ae4609d7aa67ecd3" exitCode=0
Nov 30 06:49:48 crc kubenswrapper[4941]: I1130 06:49:48.547455 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzm96" event={"ID":"a44f18a4-b76a-48fa-b2e9-df18115674d3","Type":"ContainerDied","Data":"f0d479c8a44b939d4fd3da93eb63a389352fe3de40c96e36ae4609d7aa67ecd3"}
Nov 30 06:49:48 crc kubenswrapper[4941]: I1130 06:49:48.561935 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sx2h8" podStartSLOduration=2.9305907490000003 podStartE2EDuration="54.561920589s" podCreationTimestamp="2025-11-30 06:48:54 +0000 UTC" firstStartedPulling="2025-11-30 06:48:55.785971618 +0000 UTC m=+156.554143217" lastFinishedPulling="2025-11-30 06:49:47.417301428 +0000 UTC m=+208.185473057" observedRunningTime="2025-11-30 06:49:48.559920493 +0000 UTC m=+209.328092102" watchObservedRunningTime="2025-11-30 06:49:48.561920589 +0000 UTC m=+209.330092198"
Nov 30 06:49:49 crc kubenswrapper[4941]: I1130 06:49:49.462285 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nwk4x" podUID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" containerName="registry-server" probeResult="failure" output=<
Nov 30 06:49:49 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s
Nov 30 06:49:49 crc kubenswrapper[4941]: >
Nov 30 06:49:49 crc kubenswrapper[4941]: I1130 06:49:49.564008 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzm96" event={"ID":"a44f18a4-b76a-48fa-b2e9-df18115674d3","Type":"ContainerStarted","Data":"a14a148a0d9ecb6f31969f95857c154a8e31150c40233fe7da4d1ecb0f1ab33d"}
Nov 30 06:49:49 crc kubenswrapper[4941]: I1130 06:49:49.567379 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9lzhs" event={"ID":"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f","Type":"ContainerStarted","Data":"12ca5df56c29b76a5e5ec03d49e4b764b1491bd2a534e652ccf96e577eb3309e"}
Nov 30 06:49:49 crc kubenswrapper[4941]: I1130 06:49:49.584767 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dzm96" podStartSLOduration=2.611366426 podStartE2EDuration="52.584750491s" podCreationTimestamp="2025-11-30 06:48:57 +0000 UTC" firstStartedPulling="2025-11-30 06:48:59.012295505 +0000 UTC m=+159.780467114" lastFinishedPulling="2025-11-30 06:49:48.98567957 +0000 UTC m=+209.753851179" observedRunningTime="2025-11-30 06:49:49.581616797 +0000 UTC m=+210.349788406" watchObservedRunningTime="2025-11-30 06:49:49.584750491 +0000 UTC m=+210.352922100"
Nov 30 06:49:49 crc kubenswrapper[4941]: I1130 06:49:49.603259 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9lzhs" podStartSLOduration=2.550248089 podStartE2EDuration="53.603237542s" podCreationTimestamp="2025-11-30 06:48:56 +0000 UTC" firstStartedPulling="2025-11-30 06:48:57.908438355 +0000 UTC m=+158.676609964" lastFinishedPulling="2025-11-30 06:49:48.961427808 +0000 UTC m=+209.729599417" observedRunningTime="2025-11-30 06:49:49.600540053 +0000 UTC m=+210.368711662" watchObservedRunningTime="2025-11-30 06:49:49.603237542 +0000 UTC m=+210.371409171"
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.154613 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hhb7m"]
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.155308 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hhb7m" podUID="fe26a6e1-683c-4afa-8e12-454e55a8e950" containerName="registry-server" containerID="cri-o://bfb5472b56146c15184c3111142a8814d7c20368f48c3117d79bd9d4daa0e9f6" gracePeriod=2
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.576619 4941 generic.go:334] "Generic (PLEG): container finished" podID="fe26a6e1-683c-4afa-8e12-454e55a8e950" containerID="bfb5472b56146c15184c3111142a8814d7c20368f48c3117d79bd9d4daa0e9f6" exitCode=0
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.576675 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hhb7m" event={"ID":"fe26a6e1-683c-4afa-8e12-454e55a8e950","Type":"ContainerDied","Data":"bfb5472b56146c15184c3111142a8814d7c20368f48c3117d79bd9d4daa0e9f6"}
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.576710 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hhb7m" event={"ID":"fe26a6e1-683c-4afa-8e12-454e55a8e950","Type":"ContainerDied","Data":"3c8bd2403fca4f6118c30b7e4c088617bb61773d1764730999f19aba1d3f716d"}
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.576724 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c8bd2403fca4f6118c30b7e4c088617bb61773d1764730999f19aba1d3f716d"
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.605505 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hhb7m"
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.664891 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe26a6e1-683c-4afa-8e12-454e55a8e950-utilities\") pod \"fe26a6e1-683c-4afa-8e12-454e55a8e950\" (UID: \"fe26a6e1-683c-4afa-8e12-454e55a8e950\") "
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.665040 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe26a6e1-683c-4afa-8e12-454e55a8e950-catalog-content\") pod \"fe26a6e1-683c-4afa-8e12-454e55a8e950\" (UID: \"fe26a6e1-683c-4afa-8e12-454e55a8e950\") "
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.665090 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bvvc\" (UniqueName: \"kubernetes.io/projected/fe26a6e1-683c-4afa-8e12-454e55a8e950-kube-api-access-4bvvc\") pod \"fe26a6e1-683c-4afa-8e12-454e55a8e950\" (UID: \"fe26a6e1-683c-4afa-8e12-454e55a8e950\") "
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.666100 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe26a6e1-683c-4afa-8e12-454e55a8e950-utilities" (OuterVolumeSpecName: "utilities") pod "fe26a6e1-683c-4afa-8e12-454e55a8e950" (UID: "fe26a6e1-683c-4afa-8e12-454e55a8e950"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.672534 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe26a6e1-683c-4afa-8e12-454e55a8e950-kube-api-access-4bvvc" (OuterVolumeSpecName: "kube-api-access-4bvvc") pod "fe26a6e1-683c-4afa-8e12-454e55a8e950" (UID: "fe26a6e1-683c-4afa-8e12-454e55a8e950"). InnerVolumeSpecName "kube-api-access-4bvvc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.685802 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe26a6e1-683c-4afa-8e12-454e55a8e950-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe26a6e1-683c-4afa-8e12-454e55a8e950" (UID: "fe26a6e1-683c-4afa-8e12-454e55a8e950"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.766838 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe26a6e1-683c-4afa-8e12-454e55a8e950-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.767271 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe26a6e1-683c-4afa-8e12-454e55a8e950-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 06:49:50 crc kubenswrapper[4941]: I1130 06:49:50.767284 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bvvc\" (UniqueName: \"kubernetes.io/projected/fe26a6e1-683c-4afa-8e12-454e55a8e950-kube-api-access-4bvvc\") on node \"crc\" DevicePath \"\""
Nov 30 06:49:51 crc kubenswrapper[4941]: I1130 06:49:51.583000 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hhb7m"
Nov 30 06:49:51 crc kubenswrapper[4941]: I1130 06:49:51.605992 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hhb7m"]
Nov 30 06:49:51 crc kubenswrapper[4941]: I1130 06:49:51.609127 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hhb7m"]
Nov 30 06:49:53 crc kubenswrapper[4941]: I1130 06:49:53.530026 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe26a6e1-683c-4afa-8e12-454e55a8e950" path="/var/lib/kubelet/pods/fe26a6e1-683c-4afa-8e12-454e55a8e950/volumes"
Nov 30 06:49:54 crc kubenswrapper[4941]: I1130 06:49:54.832673 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sx2h8"
Nov 30 06:49:54 crc kubenswrapper[4941]: I1130 06:49:54.832772 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sx2h8"
Nov 30 06:49:54 crc kubenswrapper[4941]: I1130 06:49:54.888739 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sx2h8"
Nov 30 06:49:55 crc kubenswrapper[4941]: I1130 06:49:55.661843 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sx2h8"
Nov 30 06:49:56 crc kubenswrapper[4941]: I1130 06:49:56.810985 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9lzhs"
Nov 30 06:49:56 crc kubenswrapper[4941]: I1130 06:49:56.811044 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9lzhs"
Nov 30 06:49:56 crc kubenswrapper[4941]: I1130 06:49:56.851639 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9lzhs"
Nov 30 06:49:57 crc kubenswrapper[4941]: I1130 06:49:57.693685 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9lzhs"
Nov 30 06:49:58 crc kubenswrapper[4941]: I1130 06:49:58.058953 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dzm96"
Nov 30 06:49:58 crc kubenswrapper[4941]: I1130 06:49:58.059080 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dzm96"
Nov 30 06:49:58 crc kubenswrapper[4941]: I1130 06:49:58.143271 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dzm96"
Nov 30 06:49:58 crc kubenswrapper[4941]: I1130 06:49:58.469393 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nwk4x"
Nov 30 06:49:58 crc kubenswrapper[4941]: I1130 06:49:58.508409 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nwk4x"
Nov 30 06:49:58 crc kubenswrapper[4941]: I1130 06:49:58.660686 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dzm96"
Nov 30 06:50:00 crc kubenswrapper[4941]: I1130 06:50:00.952338 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nwk4x"]
Nov 30 06:50:00 crc kubenswrapper[4941]: I1130 06:50:00.952862 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nwk4x" podUID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" containerName="registry-server" containerID="cri-o://cfa7fc77fd66abc4138ca8dab28c2e01c85fd80897ac41ae92999932b3656c56" gracePeriod=2
Nov 30 06:50:01 crc kubenswrapper[4941]: I1130 06:50:01.650830 4941 generic.go:334] "Generic (PLEG): container finished" podID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" containerID="cfa7fc77fd66abc4138ca8dab28c2e01c85fd80897ac41ae92999932b3656c56" exitCode=0
Nov 30 06:50:01 crc kubenswrapper[4941]: I1130 06:50:01.650909 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwk4x" event={"ID":"ea77dd7e-611a-4a66-8ae6-8f45472ea609","Type":"ContainerDied","Data":"cfa7fc77fd66abc4138ca8dab28c2e01c85fd80897ac41ae92999932b3656c56"}
Nov 30 06:50:01 crc kubenswrapper[4941]: I1130 06:50:01.989887 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nwk4x"
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.029830 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsmtv\" (UniqueName: \"kubernetes.io/projected/ea77dd7e-611a-4a66-8ae6-8f45472ea609-kube-api-access-dsmtv\") pod \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\" (UID: \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\") "
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.030081 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea77dd7e-611a-4a66-8ae6-8f45472ea609-utilities\") pod \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\" (UID: \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\") "
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.030111 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea77dd7e-611a-4a66-8ae6-8f45472ea609-catalog-content\") pod \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\" (UID: \"ea77dd7e-611a-4a66-8ae6-8f45472ea609\") "
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.030957 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea77dd7e-611a-4a66-8ae6-8f45472ea609-utilities" (OuterVolumeSpecName: "utilities") pod "ea77dd7e-611a-4a66-8ae6-8f45472ea609" (UID: "ea77dd7e-611a-4a66-8ae6-8f45472ea609"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.036663 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea77dd7e-611a-4a66-8ae6-8f45472ea609-kube-api-access-dsmtv" (OuterVolumeSpecName: "kube-api-access-dsmtv") pod "ea77dd7e-611a-4a66-8ae6-8f45472ea609" (UID: "ea77dd7e-611a-4a66-8ae6-8f45472ea609"). InnerVolumeSpecName "kube-api-access-dsmtv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.131492 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea77dd7e-611a-4a66-8ae6-8f45472ea609-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.131534 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsmtv\" (UniqueName: \"kubernetes.io/projected/ea77dd7e-611a-4a66-8ae6-8f45472ea609-kube-api-access-dsmtv\") on node \"crc\" DevicePath \"\""
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.145366 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea77dd7e-611a-4a66-8ae6-8f45472ea609-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea77dd7e-611a-4a66-8ae6-8f45472ea609" (UID: "ea77dd7e-611a-4a66-8ae6-8f45472ea609"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.233070 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea77dd7e-611a-4a66-8ae6-8f45472ea609-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.658294 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nwk4x" event={"ID":"ea77dd7e-611a-4a66-8ae6-8f45472ea609","Type":"ContainerDied","Data":"b7ab1e42ca5bd15e4d75d96ca10706768105f0b8072c18e0bef6d8aefeeee9b2"}
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.658375 4941 scope.go:117] "RemoveContainer" containerID="cfa7fc77fd66abc4138ca8dab28c2e01c85fd80897ac41ae92999932b3656c56"
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.658381 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nwk4x"
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.684213 4941 scope.go:117] "RemoveContainer" containerID="f530c531e5288f2689cff215e01e587137e113cf00a3e0d36561c4c9a158bd61"
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.707410 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nwk4x"]
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.711059 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nwk4x"]
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.717402 4941 scope.go:117] "RemoveContainer" containerID="5fe8af0326c28ec19cf74b62819842c5dc131bf33485a9f110c2140cf94d4f2e"
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.978887 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.978956 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.979006 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.979609 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 06:50:02 crc kubenswrapper[4941]: I1130 06:50:02.979709 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363" gracePeriod=600
Nov 30 06:50:03 crc kubenswrapper[4941]: I1130 06:50:03.528035 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" path="/var/lib/kubelet/pods/ea77dd7e-611a-4a66-8ae6-8f45472ea609/volumes"
Nov 30 06:50:03 crc kubenswrapper[4941]: I1130 06:50:03.669756 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363" exitCode=0
Nov 30 06:50:03 crc kubenswrapper[4941]: I1130 06:50:03.669802 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363"}
Nov 30 06:50:03 crc kubenswrapper[4941]: I1130 06:50:03.669838 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"b1a9f4cdaf11c0f0c41c32ab91daf48e0bfa2787a5967c81bc4ee521f92fdb75"} Nov 30 06:50:06 crc kubenswrapper[4941]: I1130 06:50:06.191906 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xrszt"] Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.002630 4941 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.003520 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" containerName="extract-utilities" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.003536 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" containerName="extract-utilities" Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.003553 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" containerName="extract-content" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.003564 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" containerName="extract-content" Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.003575 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe26a6e1-683c-4afa-8e12-454e55a8e950" containerName="extract-utilities" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.003588 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe26a6e1-683c-4afa-8e12-454e55a8e950" containerName="extract-utilities" Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.003602 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe26a6e1-683c-4afa-8e12-454e55a8e950" containerName="extract-content" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.003611 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe26a6e1-683c-4afa-8e12-454e55a8e950" containerName="extract-content" Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.003623 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe26a6e1-683c-4afa-8e12-454e55a8e950" containerName="registry-server" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.003631 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe26a6e1-683c-4afa-8e12-454e55a8e950" containerName="registry-server" Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.003643 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" containerName="registry-server" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.003653 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" containerName="registry-server" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.003795 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe26a6e1-683c-4afa-8e12-454e55a8e950" containerName="registry-server" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.003810 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea77dd7e-611a-4a66-8ae6-8f45472ea609" containerName="registry-server" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.004405 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.007761 4941 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.008519 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6" gracePeriod=15 Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.010785 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c" gracePeriod=15 Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.010825 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370" gracePeriod=15 Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.010897 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16" gracePeriod=15 Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.010936 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999" gracePeriod=15 Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011413 4941 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.011600 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011616 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.011629 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011635 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.011663 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011669 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-cert-regeneration-controller" Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.011678 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011683 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.011690 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011697 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.011707 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011714 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.011722 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011730 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011849 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011862 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011869 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011877 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011887 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.011894 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.071858 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.088691 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 
06:50:21.088777 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.088836 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.088951 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.088988 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.089058 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.089157 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.089195 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.148162 4941 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body= Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.148273 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" Nov 30 06:50:21 
crc kubenswrapper[4941]: I1130 06:50:21.190381 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190431 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190468 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190491 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190505 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190527 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190541 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190545 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190578 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190605 4941 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190599 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190627 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190600 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190629 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190556 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.190605 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.362506 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:50:21 crc kubenswrapper[4941]: W1130 06:50:21.387878 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-923c171a8a73292cee1eca94519f5cc10081cb0b374c9d260da8165dae105804 WatchSource:0}: Error finding container 923c171a8a73292cee1eca94519f5cc10081cb0b374c9d260da8165dae105804: Status 404 returned error can't find the container with id 923c171a8a73292cee1eca94519f5cc10081cb0b374c9d260da8165dae105804 Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.392096 4941 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.107:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187cb624615e7831 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-30 06:50:21.391050801 +0000 UTC m=+242.159222420,LastTimestamp:2025-11-30 06:50:21.391050801 +0000 UTC m=+242.159222420,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 30 06:50:21 crc kubenswrapper[4941]: E1130 06:50:21.589407 4941 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.107:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" volumeName="registry-storage" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.777242 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.779236 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.780436 4941 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c" exitCode=0 Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.780497 4941 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16" exitCode=0 Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.780518 4941 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" 
containerID="f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370" exitCode=0 Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.780537 4941 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999" exitCode=2 Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.780523 4941 scope.go:117] "RemoveContainer" containerID="a00d2ee4ab49ccfbdc6558b008f43199aead5caa7b6acbcc31e126ff678c890c" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.783041 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"ad157c621d84c4e570dd837c96aaae9290ad751a39f415cd3dee9358ec99e3d1"} Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.783100 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"923c171a8a73292cee1eca94519f5cc10081cb0b374c9d260da8165dae105804"} Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.783908 4941 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.785565 4941 generic.go:334] "Generic (PLEG): container finished" podID="c9266077-b4e5-4258-a860-02f87c13bb5c" containerID="84be2253e3bb3c830604626b484be0f1ac6a034e3c54f77ae54ebe26bbf2d49f" exitCode=0 Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.785634 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"c9266077-b4e5-4258-a860-02f87c13bb5c","Type":"ContainerDied","Data":"84be2253e3bb3c830604626b484be0f1ac6a034e3c54f77ae54ebe26bbf2d49f"} Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.786722 4941 status_manager.go:851] "Failed to get status for pod" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:21 crc kubenswrapper[4941]: I1130 06:50:21.787303 4941 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:22 crc kubenswrapper[4941]: E1130 06:50:22.398281 4941 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:22 crc kubenswrapper[4941]: E1130 06:50:22.399583 4941 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: 
connection refused" Nov 30 06:50:22 crc kubenswrapper[4941]: E1130 06:50:22.400122 4941 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:22 crc kubenswrapper[4941]: E1130 06:50:22.400757 4941 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:22 crc kubenswrapper[4941]: E1130 06:50:22.401379 4941 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:22 crc kubenswrapper[4941]: I1130 06:50:22.401468 4941 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Nov 30 06:50:22 crc kubenswrapper[4941]: E1130 06:50:22.402231 4941 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" interval="200ms" Nov 30 06:50:22 crc kubenswrapper[4941]: E1130 06:50:22.604444 4941 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" interval="400ms" Nov 30 06:50:22 crc kubenswrapper[4941]: I1130 06:50:22.800014 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 30 06:50:23 crc kubenswrapper[4941]: E1130 06:50:23.005372 4941 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" interval="800ms" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.158972 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.161878 4941 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.163509 4941 status_manager.go:851] "Failed to get status for pod" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.225158 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/c9266077-b4e5-4258-a860-02f87c13bb5c-var-lock\") pod \"c9266077-b4e5-4258-a860-02f87c13bb5c\" (UID: \"c9266077-b4e5-4258-a860-02f87c13bb5c\") " Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.225231 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9266077-b4e5-4258-a860-02f87c13bb5c-kube-api-access\") pod \"c9266077-b4e5-4258-a860-02f87c13bb5c\" (UID: \"c9266077-b4e5-4258-a860-02f87c13bb5c\") " Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.225273 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9266077-b4e5-4258-a860-02f87c13bb5c-kubelet-dir\") pod \"c9266077-b4e5-4258-a860-02f87c13bb5c\" (UID: \"c9266077-b4e5-4258-a860-02f87c13bb5c\") " Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.225309 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c9266077-b4e5-4258-a860-02f87c13bb5c-var-lock" (OuterVolumeSpecName: "var-lock") pod "c9266077-b4e5-4258-a860-02f87c13bb5c" (UID: "c9266077-b4e5-4258-a860-02f87c13bb5c"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.225500 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c9266077-b4e5-4258-a860-02f87c13bb5c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c9266077-b4e5-4258-a860-02f87c13bb5c" (UID: "c9266077-b4e5-4258-a860-02f87c13bb5c"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.225577 4941 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/c9266077-b4e5-4258-a860-02f87c13bb5c-var-lock\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.233802 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9266077-b4e5-4258-a860-02f87c13bb5c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c9266077-b4e5-4258-a860-02f87c13bb5c" (UID: "c9266077-b4e5-4258-a860-02f87c13bb5c"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.327342 4941 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c9266077-b4e5-4258-a860-02f87c13bb5c-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.327378 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c9266077-b4e5-4258-a860-02f87c13bb5c-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:23 crc kubenswrapper[4941]: E1130 06:50:23.806386 4941 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" interval="1.6s" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.814478 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"c9266077-b4e5-4258-a860-02f87c13bb5c","Type":"ContainerDied","Data":"1252a6c2d439c82c8e3fac247099f9575adf0ab558a63361946c90dd7ae29508"} Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.814522 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.814535 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1252a6c2d439c82c8e3fac247099f9575adf0ab558a63361946c90dd7ae29508" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.819274 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.819856 4941 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.820682 4941 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6" exitCode=0 Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.822552 4941 status_manager.go:851] "Failed to get status for pod" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.895285 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.896417 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.897275 4941 status_manager.go:851] "Failed to get status for pod" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.897669 4941 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.898370 4941 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.935914 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.936040 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.936041 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.936101 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.936136 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.936229 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.936495 4941 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.936522 4941 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:23 crc kubenswrapper[4941]: I1130 06:50:23.936541 4941 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:24 crc kubenswrapper[4941]: I1130 06:50:24.833410 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 30 06:50:24 crc kubenswrapper[4941]: I1130 06:50:24.834652 4941 scope.go:117] "RemoveContainer" containerID="4a906413491b113ef81af870d54b909a3560d248197eefddf527aa7c7277b35c" Nov 30 06:50:24 crc kubenswrapper[4941]: I1130 06:50:24.834803 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:24 crc kubenswrapper[4941]: I1130 06:50:24.855491 4941 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:24 crc kubenswrapper[4941]: I1130 06:50:24.856921 4941 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:24 crc kubenswrapper[4941]: I1130 06:50:24.857752 4941 status_manager.go:851] "Failed to get status for pod" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:24 crc kubenswrapper[4941]: I1130 06:50:24.859623 4941 scope.go:117] "RemoveContainer" containerID="cd1627448a8eb5aab00c396ffc00d73ac96a680181a7f9f465f05a260525fe16" Nov 30 06:50:24 crc kubenswrapper[4941]: I1130 06:50:24.878507 4941 scope.go:117] "RemoveContainer" containerID="f7ea14ad1b80b7cbc9ccc1fed17dfa0a22da09cc8aad49fd3068753d57918370" Nov 30 06:50:24 crc kubenswrapper[4941]: I1130 06:50:24.895950 4941 scope.go:117] "RemoveContainer" containerID="fe60dd2b5abd74fb9b8fa7eae69abbdb4586cc0d2fa65249d48a352654ab5999" Nov 30 06:50:24 crc kubenswrapper[4941]: I1130 06:50:24.910729 4941 scope.go:117] "RemoveContainer" containerID="5dc3b342c817e01d472055c8d6e7652ee4df0da221a94f65c94659ccb4bb43e6" Nov 30 06:50:24 crc kubenswrapper[4941]: I1130 06:50:24.925346 4941 scope.go:117] "RemoveContainer" containerID="751d96fe94b6b6d83dcb3c67acbd4548af16d2650eb623aff51b211bafe4aa27" Nov 30 06:50:25 crc 
kubenswrapper[4941]: E1130 06:50:25.409157 4941 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" interval="3.2s" Nov 30 06:50:25 crc kubenswrapper[4941]: I1130 06:50:25.538685 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Nov 30 06:50:26 crc kubenswrapper[4941]: E1130 06:50:26.573523 4941 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.107:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187cb624615e7831 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-30 06:50:21.391050801 +0000 UTC m=+242.159222420,LastTimestamp:2025-11-30 06:50:21.391050801 +0000 UTC m=+242.159222420,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 30 06:50:28 crc kubenswrapper[4941]: E1130 06:50:28.610763 4941 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.107:6443: connect: connection refused" interval="6.4s" Nov 30 06:50:29 crc kubenswrapper[4941]: I1130 06:50:29.524056 4941 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:29 crc kubenswrapper[4941]: I1130 06:50:29.524873 4941 status_manager.go:851] "Failed to get status for pod" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.231588 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" podUID="6c62f053-996e-44b6-9a65-3d7f292b6cef" containerName="oauth-openshift" containerID="cri-o://4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b" gracePeriod=15 Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.521733 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.523463 4941 status_manager.go:851] "Failed to get status for pod" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.523917 4941 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.542142 4941 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.542237 4941 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62" Nov 30 06:50:31 crc kubenswrapper[4941]: E1130 06:50:31.542728 4941 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.543236 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.630026 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.630606 4941 status_manager.go:851] "Failed to get status for pod" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.630868 4941 status_manager.go:851] "Failed to get status for pod" podUID="6c62f053-996e-44b6-9a65-3d7f292b6cef" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-xrszt\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.631237 4941 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.649113 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-session\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.649167 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-cliconfig\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.649190 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-provider-selection\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.649215 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6c62f053-996e-44b6-9a65-3d7f292b6cef-audit-dir\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.649233 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z58l6\" (UniqueName: \"kubernetes.io/projected/6c62f053-996e-44b6-9a65-3d7f292b6cef-kube-api-access-z58l6\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.649260 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-router-certs\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: 
\"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.649278 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-audit-policies\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.649294 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-ocp-branding-template\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.649318 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-serving-cert\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.649349 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-service-ca\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.649366 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-login\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.650848 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6c62f053-996e-44b6-9a65-3d7f292b6cef-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.651542 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.651572 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.651874 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.657107 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.657107 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.657592 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c62f053-996e-44b6-9a65-3d7f292b6cef-kube-api-access-z58l6" (OuterVolumeSpecName: "kube-api-access-z58l6") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "kube-api-access-z58l6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.660015 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.660549 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.661000 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.661289 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750067 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-idp-0-file-data\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750205 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-error\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750308 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-trusted-ca-bundle\") pod \"6c62f053-996e-44b6-9a65-3d7f292b6cef\" (UID: \"6c62f053-996e-44b6-9a65-3d7f292b6cef\") " Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750671 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750687 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750696 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750708 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750717 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750727 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 
06:50:31.750736 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750745 4941 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6c62f053-996e-44b6-9a65-3d7f292b6cef-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750754 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z58l6\" (UniqueName: \"kubernetes.io/projected/6c62f053-996e-44b6-9a65-3d7f292b6cef-kube-api-access-z58l6\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750762 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.750771 4941 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.751824 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.754699 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.755242 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "6c62f053-996e-44b6-9a65-3d7f292b6cef" (UID: "6c62f053-996e-44b6-9a65-3d7f292b6cef"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.851307 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.851358 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.851370 4941 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6c62f053-996e-44b6-9a65-3d7f292b6cef-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.893483 4941 generic.go:334] "Generic (PLEG): container finished" podID="6c62f053-996e-44b6-9a65-3d7f292b6cef" containerID="4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b" exitCode=0 Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.893537 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.893546 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" event={"ID":"6c62f053-996e-44b6-9a65-3d7f292b6cef","Type":"ContainerDied","Data":"4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b"} Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.893571 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" event={"ID":"6c62f053-996e-44b6-9a65-3d7f292b6cef","Type":"ContainerDied","Data":"980edae3ab499bf5e801714dfe18278d39944f00ebffacd09ab0d67123b1c1b3"} Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.893585 4941 scope.go:117] "RemoveContainer" containerID="4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.894258 4941 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.894541 4941 status_manager.go:851] "Failed to get status for pod" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.894779 4941 status_manager.go:851] "Failed to get status for pod" podUID="6c62f053-996e-44b6-9a65-3d7f292b6cef" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-xrszt\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.895790 4941 
generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="b1263ae9238bb842b20222a4ed76d12003adec7a029b358b2e02c2c64d4d2154" exitCode=0 Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.895833 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"b1263ae9238bb842b20222a4ed76d12003adec7a029b358b2e02c2c64d4d2154"} Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.895860 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"19608cad9db2841bdc331c8f4a80c4adb3d58bd9d4c706a59c3ff69b05005262"} Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.896119 4941 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.896132 4941 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.896389 4941 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.896616 4941 status_manager.go:851] "Failed to get status for pod" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.896852 4941 status_manager.go:851] "Failed to get status for pod" podUID="6c62f053-996e-44b6-9a65-3d7f292b6cef" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-xrszt\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: E1130 06:50:31.897310 4941 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.909845 4941 status_manager.go:851] "Failed to get status for pod" podUID="6c62f053-996e-44b6-9a65-3d7f292b6cef" pod="openshift-authentication/oauth-openshift-558db77b4-xrszt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-xrszt\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.910377 4941 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.910709 4941 status_manager.go:851] "Failed to get status for pod" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.107:6443: connect: connection refused" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.918264 4941 scope.go:117] "RemoveContainer" containerID="4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b" Nov 30 06:50:31 crc kubenswrapper[4941]: E1130 06:50:31.919994 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b\": container with ID starting with 4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b not found: ID does not exist" containerID="4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b" Nov 30 06:50:31 crc kubenswrapper[4941]: I1130 06:50:31.920035 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b"} err="failed to get container status \"4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b\": rpc error: code = NotFound desc = could not find container \"4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b\": container with ID starting with 4c1f543ec281670554ad6522922d061d445abe5e27efd6ed6b5436452babc39b not found: ID does not exist" Nov 30 06:50:32 crc kubenswrapper[4941]: I1130 06:50:32.910099 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bb46ed52bb14aacae4112071df7c07989efa1c3491f919f7fd178bbee6f3881e"} Nov 30 06:50:32 crc kubenswrapper[4941]: I1130 06:50:32.910438 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a693606a40aa2a027c2c6d89b636bb65e9df4e708306dae36c960098886fef91"} Nov 30 06:50:32 crc kubenswrapper[4941]: I1130 06:50:32.910458 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"12df0fe4da8606d8875b1df053edfb6b6d74702a99d2707afefddd28dfe8c238"} Nov 30 06:50:32 crc kubenswrapper[4941]: I1130 06:50:32.910473 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"15521db721f1a89147b5991e325db50b5c794e9b8c8972ee98db862846e1ac4b"} Nov 30 06:50:33 crc kubenswrapper[4941]: I1130 06:50:33.920591 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"237d8bf78f0d017c196d629724decec98ace4b099c59ab6bfaf9e25e1bdd8012"} Nov 30 06:50:33 crc kubenswrapper[4941]: I1130 06:50:33.920915 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:33 crc kubenswrapper[4941]: I1130 06:50:33.921076 4941 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62" Nov 30 06:50:33 crc kubenswrapper[4941]: I1130 06:50:33.921106 4941 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62" Nov 30 06:50:36 crc kubenswrapper[4941]: I1130 06:50:36.544374 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:36 crc kubenswrapper[4941]: I1130 06:50:36.544598 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:36 crc kubenswrapper[4941]: I1130 06:50:36.551418 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:38 crc kubenswrapper[4941]: I1130 06:50:38.931203 4941 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:38 crc kubenswrapper[4941]: I1130 06:50:38.955646 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Nov 30 06:50:38 crc kubenswrapper[4941]: I1130 06:50:38.955687 4941 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529" exitCode=1 Nov 30 06:50:38 crc kubenswrapper[4941]: I1130 06:50:38.956022 4941 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62" Nov 30 06:50:38 crc kubenswrapper[4941]: I1130 06:50:38.956038 4941 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62" Nov 30 06:50:38 crc kubenswrapper[4941]: I1130 06:50:38.956172 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529"} Nov 30 06:50:38 crc kubenswrapper[4941]: I1130 06:50:38.956544 4941 scope.go:117] "RemoveContainer" containerID="3a63b00cbb06b3fc676b799162f3f7f0a70f163d7453414b78e05aff9406e529" Nov 30 06:50:38 crc kubenswrapper[4941]: I1130 06:50:38.963657 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:39 crc kubenswrapper[4941]: I1130 06:50:39.166428 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:50:39 crc kubenswrapper[4941]: I1130 06:50:39.537222 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:50:39 crc kubenswrapper[4941]: I1130 06:50:39.541677 4941 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="446b275a-bfc8-4fb1-9350-cc2d95368d57" Nov 30 06:50:39 crc 
kubenswrapper[4941]: I1130 06:50:39.970663 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Nov 30 06:50:39 crc kubenswrapper[4941]: I1130 06:50:39.972385 4941 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62" Nov 30 06:50:39 crc kubenswrapper[4941]: I1130 06:50:39.972639 4941 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="3d5f4c0b-b3cf-42bb-ba2b-bfae750c4a62" Nov 30 06:50:39 crc kubenswrapper[4941]: I1130 06:50:39.972435 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ea7f2ad86c44e6d26831d74ce044bd200df15d5ec133d118cf69767099d238e6"} Nov 30 06:50:39 crc kubenswrapper[4941]: I1130 06:50:39.978472 4941 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="446b275a-bfc8-4fb1-9350-cc2d95368d57" Nov 30 06:50:44 crc kubenswrapper[4941]: I1130 06:50:44.908668 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 30 06:50:44 crc kubenswrapper[4941]: I1130 06:50:44.967397 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.080231 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.190954 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.214315 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.215673 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.252042 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.267830 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.291171 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.367800 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.405703 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.582457 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.596588 4941 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.818796 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.907992 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.972647 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 30 06:50:45 crc kubenswrapper[4941]: I1130 06:50:45.987133 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 30 06:50:46 crc kubenswrapper[4941]: I1130 06:50:46.017173 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 30 06:50:46 crc kubenswrapper[4941]: I1130 06:50:46.121225 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 30 06:50:46 crc kubenswrapper[4941]: I1130 06:50:46.186197 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Nov 30 06:50:46 crc kubenswrapper[4941]: I1130 06:50:46.402631 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Nov 30 06:50:46 crc kubenswrapper[4941]: I1130 06:50:46.483559 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 30 06:50:46 crc kubenswrapper[4941]: I1130 06:50:46.815945 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Nov 30 06:50:46 crc kubenswrapper[4941]: I1130 06:50:46.918641 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 30 06:50:47 crc kubenswrapper[4941]: I1130 06:50:47.309241 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Nov 30 06:50:47 crc kubenswrapper[4941]: I1130 06:50:47.390986 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Nov 30 06:50:47 crc kubenswrapper[4941]: I1130 06:50:47.462887 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 30 06:50:47 crc kubenswrapper[4941]: I1130 06:50:47.464713 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 30 06:50:47 crc kubenswrapper[4941]: I1130 06:50:47.465190 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 30 06:50:47 crc kubenswrapper[4941]: I1130 06:50:47.492273 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 30 06:50:47 crc kubenswrapper[4941]: I1130 06:50:47.635989 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Nov 30 06:50:47 crc kubenswrapper[4941]: I1130 06:50:47.805807 4941 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Nov 30 06:50:47 crc kubenswrapper[4941]: I1130 06:50:47.900619 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Nov 30 06:50:47 crc kubenswrapper[4941]: I1130 06:50:47.971576 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.079133 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.119222 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.215500 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.219599 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.291223 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.410092 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.425418 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.463893 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.508132 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.579793 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.812490 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.945811 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.960155 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 30 06:50:48 crc kubenswrapper[4941]: I1130 06:50:48.988028 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 30 06:50:49 crc kubenswrapper[4941]: I1130 06:50:49.029095 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 30 06:50:49 crc kubenswrapper[4941]: I1130 06:50:49.033491 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:50:49 crc kubenswrapper[4941]: I1130 06:50:49.037271 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 30 06:50:49 crc kubenswrapper[4941]: I1130 06:50:49.137469 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 30 06:50:49 crc kubenswrapper[4941]: I1130 06:50:49.222876 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Nov 30 06:50:49 crc kubenswrapper[4941]: I1130 06:50:49.228458 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Nov 30 06:50:49 crc kubenswrapper[4941]: I1130 06:50:49.497408 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 30 06:50:49 crc kubenswrapper[4941]: I1130 06:50:49.677640 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 30 06:50:49 crc kubenswrapper[4941]: I1130 06:50:49.780601 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 30 06:50:49 crc kubenswrapper[4941]: I1130 06:50:49.887340 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.010652 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.011866 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.118499 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.176854 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.207645 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.272644 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.315310 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.465023 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.664871 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.725104 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.748803 4941 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.772360 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Nov 30 06:50:50 crc kubenswrapper[4941]: I1130 06:50:50.794541 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 30 06:50:51 crc kubenswrapper[4941]: I1130 06:50:51.029104 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 30 06:50:51 crc kubenswrapper[4941]: I1130 06:50:51.149786 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Nov 30 06:50:51 crc kubenswrapper[4941]: I1130 06:50:51.203105 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 30 06:50:51 crc kubenswrapper[4941]: I1130 06:50:51.218268 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 30 06:50:51 crc kubenswrapper[4941]: I1130 06:50:51.238173 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 30 06:50:51 crc kubenswrapper[4941]: I1130 06:50:51.281736 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Nov 30 06:50:51 crc kubenswrapper[4941]: I1130 06:50:51.483536 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 30 06:50:51 crc kubenswrapper[4941]: I1130 06:50:51.618043 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 30 06:50:51 crc kubenswrapper[4941]: I1130 06:50:51.804968 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Nov 30 06:50:51 crc kubenswrapper[4941]: I1130 06:50:51.871830 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 30 06:50:51 crc kubenswrapper[4941]: I1130 06:50:51.938968 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 30 06:50:52 crc kubenswrapper[4941]: I1130 06:50:52.591022 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 30 06:50:52 crc kubenswrapper[4941]: I1130 06:50:52.749052 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 30 06:50:52 crc kubenswrapper[4941]: I1130 06:50:52.758399 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 30 06:50:52 crc kubenswrapper[4941]: I1130 06:50:52.879021 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Nov 30 06:50:52 crc kubenswrapper[4941]: I1130 06:50:52.925302 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 30 06:50:52 crc kubenswrapper[4941]: I1130 06:50:52.965919 4941 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 30 06:50:53 crc kubenswrapper[4941]: I1130 06:50:53.076547 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Nov 30 06:50:53 crc kubenswrapper[4941]: I1130 06:50:53.427765 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 30 06:50:53 crc kubenswrapper[4941]: I1130 06:50:53.477383 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 30 06:50:53 crc kubenswrapper[4941]: I1130 06:50:53.581886 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 30 06:50:53 crc kubenswrapper[4941]: I1130 06:50:53.582406 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Nov 30 06:50:53 crc kubenswrapper[4941]: I1130 06:50:53.641473 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 30 06:50:53 crc kubenswrapper[4941]: I1130 06:50:53.766756 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 30 06:50:53 crc kubenswrapper[4941]: I1130 06:50:53.861306 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 30 06:50:53 crc kubenswrapper[4941]: I1130 06:50:53.971594 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 30 06:50:54 crc kubenswrapper[4941]: I1130 06:50:54.098204 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 30 06:50:54 crc kubenswrapper[4941]: I1130 06:50:54.108232 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Nov 30 06:50:54 crc kubenswrapper[4941]: I1130 06:50:54.250867 4941 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Nov 30 06:50:54 crc kubenswrapper[4941]: I1130 06:50:54.274946 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 30 06:50:54 crc kubenswrapper[4941]: I1130 06:50:54.453431 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 30 06:50:54 crc kubenswrapper[4941]: I1130 06:50:54.638618 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Nov 30 06:50:54 crc kubenswrapper[4941]: I1130 06:50:54.672479 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 30 06:50:54 crc kubenswrapper[4941]: I1130 06:50:54.827059 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.201953 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.255889 4941 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.401076 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.644546 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.786156 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.804884 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.834485 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.937468 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.986266 4941 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.989566 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=34.989549569 podStartE2EDuration="34.989549569s" podCreationTimestamp="2025-11-30 06:50:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:50:38.946126709 +0000 UTC m=+259.714298318" watchObservedRunningTime="2025-11-30 06:50:55.989549569 +0000 UTC m=+276.757721178" Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.990898 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xrszt","openshift-kube-apiserver/kube-apiserver-crc"] Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.990943 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 30 06:50:55 crc kubenswrapper[4941]: I1130 06:50:55.994742 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.006221 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=18.006201861 podStartE2EDuration="18.006201861s" podCreationTimestamp="2025-11-30 06:50:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:50:56.006028785 +0000 UTC m=+276.774200394" watchObservedRunningTime="2025-11-30 06:50:56.006201861 +0000 UTC m=+276.774373480" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.103752 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.301072 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.343745 4941 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.348651 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.426242 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.677208 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.717311 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.720909 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.726909 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.769886 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.791324 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Nov 30 06:50:56 crc kubenswrapper[4941]: I1130 06:50:56.803483 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.029709 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.040560 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.153196 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.262754 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.291678 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.325857 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.424250 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.441162 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.476311 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.509581 4941 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.528375 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c62f053-996e-44b6-9a65-3d7f292b6cef" path="/var/lib/kubelet/pods/6c62f053-996e-44b6-9a65-3d7f292b6cef/volumes" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.601541 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.826842 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.922233 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Nov 30 06:50:57 crc kubenswrapper[4941]: I1130 06:50:57.962892 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 30 06:50:58 crc kubenswrapper[4941]: I1130 06:50:58.011120 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 30 06:50:58 crc kubenswrapper[4941]: I1130 06:50:58.114539 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Nov 30 06:50:58 crc kubenswrapper[4941]: I1130 06:50:58.366364 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 30 06:50:58 crc kubenswrapper[4941]: I1130 06:50:58.444680 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 30 06:50:58 crc kubenswrapper[4941]: I1130 06:50:58.485111 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 30 06:50:58 crc kubenswrapper[4941]: I1130 06:50:58.500419 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 30 06:50:58 crc kubenswrapper[4941]: I1130 06:50:58.561644 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 30 06:50:58 crc kubenswrapper[4941]: I1130 06:50:58.831938 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 30 06:50:58 crc kubenswrapper[4941]: I1130 06:50:58.876867 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 30 06:50:58 crc kubenswrapper[4941]: I1130 06:50:58.880814 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 30 06:50:58 crc kubenswrapper[4941]: I1130 06:50:58.967289 4941 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.026005 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.026350 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" 
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.128257 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.172628 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.214957 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.221027 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.325444 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.349588 4941 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.450116 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.483308 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.569694 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.599436 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.670688 4941 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.688540 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.732041 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.759091 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.822489 4941 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.854527 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Nov 30 06:50:59 crc kubenswrapper[4941]: I1130 06:50:59.994569 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.112057 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.185390 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.398459 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.471910 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.682063 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.736544 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.756624 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.803165 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.880118 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.901534 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.944085 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.976949 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Nov 30 06:51:00 crc kubenswrapper[4941]: I1130 06:51:00.994386 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.023774 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.120078 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.177521 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.247970 4941 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.248645 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://ad157c621d84c4e570dd837c96aaae9290ad751a39f415cd3dee9358ec99e3d1" gracePeriod=5
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.252439 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.320396 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.520110 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.528293 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.559615 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.776116 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.784969 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.809233 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Nov 30 06:51:01 crc kubenswrapper[4941]: I1130 06:51:01.947001 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Nov 30 06:51:02 crc kubenswrapper[4941]: I1130 06:51:02.017044 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Nov 30 06:51:02 crc kubenswrapper[4941]: I1130 06:51:02.058535 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Nov 30 06:51:02 crc kubenswrapper[4941]: I1130 06:51:02.172141 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Nov 30 06:51:02 crc kubenswrapper[4941]: I1130 06:51:02.228546 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Nov 30 06:51:02 crc kubenswrapper[4941]: I1130 06:51:02.238002 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Nov 30 06:51:02 crc kubenswrapper[4941]: I1130 06:51:02.429785 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Nov 30 06:51:02 crc kubenswrapper[4941]: I1130 06:51:02.434992 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Nov 30 06:51:02 crc kubenswrapper[4941]: I1130 06:51:02.475897 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Nov 30 06:51:02 crc kubenswrapper[4941]: I1130 06:51:02.523033 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Nov 30 06:51:02 crc kubenswrapper[4941]: I1130 06:51:02.735772 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Nov 30 06:51:02 crc kubenswrapper[4941]: I1130 06:51:02.775482 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Nov 30 06:51:02 crc kubenswrapper[4941]: I1130 06:51:02.851843 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.083509 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.134279 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.140639 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.143667 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"]
Nov 30 06:51:03 crc kubenswrapper[4941]: E1130 06:51:03.143916 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.143937 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Nov 30 06:51:03 crc kubenswrapper[4941]: E1130 06:51:03.143946 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c62f053-996e-44b6-9a65-3d7f292b6cef" containerName="oauth-openshift"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.143953 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c62f053-996e-44b6-9a65-3d7f292b6cef" containerName="oauth-openshift"
Nov 30 06:51:03 crc kubenswrapper[4941]: E1130 06:51:03.143972 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" containerName="installer"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.143980 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" containerName="installer"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.144101 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9266077-b4e5-4258-a860-02f87c13bb5c" containerName="installer"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.144116 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.144126 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c62f053-996e-44b6-9a65-3d7f292b6cef" containerName="oauth-openshift"
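When a new pod is admitted, the CPU and memory managers prune checkpoint entries for containers whose pods no longer exist, which is what the RemoveStaleState lines above record. That state is checkpointed on disk under /var/lib/kubelet; a hedged sketch that reads the cpu_manager_state file (the path is standard, but the JSON field names below are an assumption based on recent kubelet versions and may differ; with the default "none" policy the entries map is empty):

    package main

    import (
    	"encoding/json"
    	"fmt"
    	"os"
    )

    // cpuManagerState approximates the kubelet's CPU manager checkpoint layout.
    // Field names are assumptions; verify against the kubelet version in use.
    type cpuManagerState struct {
    	PolicyName    string                       `json:"policyName"`
    	DefaultCPUSet string                       `json:"defaultCpuSet"`
    	Entries       map[string]map[string]string `json:"entries,omitempty"` // podUID -> container -> cpuset
    }

    func main() {
    	raw, err := os.ReadFile("/var/lib/kubelet/cpu_manager_state")
    	if err != nil {
    		panic(err)
    	}
    	var st cpuManagerState
    	if err := json.Unmarshal(raw, &st); err != nil {
    		panic(err)
    	}
    	fmt.Printf("policy=%s defaultCpuSet=%q\n", st.PolicyName, st.DefaultCPUSet)
    	for podUID, containers := range st.Entries {
    		for name, cpus := range containers {
    			// Entries whose pod is gone are what RemoveStaleState deletes.
    			fmt.Printf("pod %s container %s pinned to %s\n", podUID, name, cpus)
    		}
    	}
    }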
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.144584 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.148622 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.148719 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.149003 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.149020 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.149006 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.149156 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.149302 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.149316 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.149433 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.149557 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.152680 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.153082 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.153848 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"]
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.158557 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.160213 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.167500 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.168270 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.277658 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/752c3bb9-54c8-48f0-87b3-1e08f1498c86-audit-dir\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.277969 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-user-template-error\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.277999 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/752c3bb9-54c8-48f0-87b3-1e08f1498c86-audit-policies\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.278024 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-cliconfig\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.278042 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.278067 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-serving-cert\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.278084 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.278151 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-user-template-login\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.278184 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.278214 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-service-ca\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.278253 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kh4q5\" (UniqueName: \"kubernetes.io/projected/752c3bb9-54c8-48f0-87b3-1e08f1498c86-kube-api-access-kh4q5\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.278393 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.278412 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-session\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.278430 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-router-certs\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.379823 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-user-template-error\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.379891 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/752c3bb9-54c8-48f0-87b3-1e08f1498c86-audit-policies\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.379924 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-cliconfig\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.379948 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.379989 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-serving-cert\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.380016 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.380039 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-user-template-login\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.380065 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.380092 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-service-ca\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.380126 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kh4q5\" (UniqueName: \"kubernetes.io/projected/752c3bb9-54c8-48f0-87b3-1e08f1498c86-kube-api-access-kh4q5\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.380915 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.381264 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-session\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.381347 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-router-certs\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.381391 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/752c3bb9-54c8-48f0-87b3-1e08f1498c86-audit-dir\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.381531 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/752c3bb9-54c8-48f0-87b3-1e08f1498c86-audit-dir\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.381351 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/752c3bb9-54c8-48f0-87b3-1e08f1498c86-audit-policies\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.382008 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-cliconfig\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.382567 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.382979 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-service-ca\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.386248 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-session\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.386688 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-router-certs\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.386836 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-user-template-error\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.387646 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-serving-cert\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.387680 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-user-template-login\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.388206 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.391295 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.403017 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/752c3bb9-54c8-48f0-87b3-1e08f1498c86-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.409416 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kh4q5\" (UniqueName: \"kubernetes.io/projected/752c3bb9-54c8-48f0-87b3-1e08f1498c86-kube-api-access-kh4q5\") pod \"oauth-openshift-68c4c8b9fb-dpzmc\" (UID: \"752c3bb9-54c8-48f0-87b3-1e08f1498c86\") " pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.463216 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.646589 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.695601 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"]
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.902679 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Nov 30 06:51:03 crc kubenswrapper[4941]: I1130 06:51:03.991964 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.009195 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.118664 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.141005 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc" event={"ID":"752c3bb9-54c8-48f0-87b3-1e08f1498c86","Type":"ContainerStarted","Data":"9d8f69d0706f79fbb1ba68df119c4c06b0bad7584fab28065d795f47d47e45d8"}
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.141052 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc" event={"ID":"752c3bb9-54c8-48f0-87b3-1e08f1498c86","Type":"ContainerStarted","Data":"2b21f3deaa185c6f8dba3e9a15035ea1f9ca19c9dcf87e7365f067e2cedba8bd"}
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.142280 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.146413 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.164424 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc" podStartSLOduration=58.164404541 podStartE2EDuration="58.164404541s" podCreationTimestamp="2025-11-30 06:50:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:51:04.160157975 +0000 UTC m=+284.928329604" watchObservedRunningTime="2025-11-30 06:51:04.164404541 +0000 UTC m=+284.932576150"
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.337106 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-68c4c8b9fb-dpzmc"
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.361160 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.509171 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.510752 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.523760 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.703800 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Nov 30 06:51:04 crc kubenswrapper[4941]: I1130 06:51:04.767702 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Nov 30 06:51:05 crc kubenswrapper[4941]: I1130 06:51:05.113239 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Nov 30 06:51:05 crc kubenswrapper[4941]: I1130 06:51:05.144204 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Nov 30 06:51:05 crc kubenswrapper[4941]: I1130 06:51:05.246892 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Nov 30 06:51:05 crc kubenswrapper[4941]: I1130 06:51:05.337813 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Nov 30 06:51:05 crc kubenswrapper[4941]: I1130 06:51:05.463661 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Nov 30 06:51:05 crc kubenswrapper[4941]: I1130 06:51:05.554768 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Nov 30 06:51:05 crc kubenswrapper[4941]: I1130 06:51:05.658424 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Nov 30 06:51:05 crc kubenswrapper[4941]: I1130 06:51:05.875140 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Nov 30 06:51:05 crc kubenswrapper[4941]: I1130 06:51:05.884070 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Nov 30 06:51:05 crc kubenswrapper[4941]: I1130 06:51:05.976536 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.064080 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
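The two "SyncLoop (probe)" lines bracket the readiness transition: the probe result moves from "" (unknown) to "ready" in roughly 200ms. The probe specification itself is not part of this log; a hedged construction of a typical HTTPS readiness probe using the Kubernetes API types, purely to show the knobs involved (path, port, and thresholds below are illustrative, not oauth-openshift's actual values):

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    	"k8s.io/apimachinery/pkg/util/intstr"
    )

    func main() {
    	probe := &corev1.Probe{
    		ProbeHandler: corev1.ProbeHandler{ // named Handler before k8s.io/api v0.23
    			HTTPGet: &corev1.HTTPGetAction{
    				Path:   "/healthz",
    				Port:   intstr.FromInt(6443),
    				Scheme: corev1.URISchemeHTTPS,
    			},
    		},
    		PeriodSeconds:    10, // probe interval
    		FailureThreshold: 3,  // consecutive failures before "not ready"
    	}
    	fmt.Printf("readiness probe: %s %s every %ds\n",
    		probe.HTTPGet.Scheme, probe.HTTPGet.Path, probe.PeriodSeconds)
    }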
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.400165 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.403621 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.403756 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.528108 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.528175 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.528216 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.528249 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.528314 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.528832 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.528979 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.529067 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.529088 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.541097 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.630162 4941 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.630205 4941 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.630217 4941 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.630229 4941 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Nov 30 06:51:06 crc kubenswrapper[4941]: I1130 06:51:06.630239 4941 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.164741 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.164825 4941 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="ad157c621d84c4e570dd837c96aaae9290ad751a39f415cd3dee9358ec99e3d1" exitCode=137
Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.164890 4941 scope.go:117] "RemoveContainer" containerID="ad157c621d84c4e570dd837c96aaae9290ad751a39f415cd3dee9358ec99e3d1"
Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.164913 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.197123 4941 scope.go:117] "RemoveContainer" containerID="ad157c621d84c4e570dd837c96aaae9290ad751a39f415cd3dee9358ec99e3d1" Nov 30 06:51:07 crc kubenswrapper[4941]: E1130 06:51:07.197874 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad157c621d84c4e570dd837c96aaae9290ad751a39f415cd3dee9358ec99e3d1\": container with ID starting with ad157c621d84c4e570dd837c96aaae9290ad751a39f415cd3dee9358ec99e3d1 not found: ID does not exist" containerID="ad157c621d84c4e570dd837c96aaae9290ad751a39f415cd3dee9358ec99e3d1" Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.197913 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad157c621d84c4e570dd837c96aaae9290ad751a39f415cd3dee9358ec99e3d1"} err="failed to get container status \"ad157c621d84c4e570dd837c96aaae9290ad751a39f415cd3dee9358ec99e3d1\": rpc error: code = NotFound desc = could not find container \"ad157c621d84c4e570dd837c96aaae9290ad751a39f415cd3dee9358ec99e3d1\": container with ID starting with ad157c621d84c4e570dd837c96aaae9290ad751a39f415cd3dee9358ec99e3d1 not found: ID does not exist" Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.202022 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.529117 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.529377 4941 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.549046 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.549109 4941 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="301b47aa-b772-400d-b86b-01ed741ddcfc" Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.552501 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.552556 4941 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="301b47aa-b772-400d-b86b-01ed741ddcfc" Nov 30 06:51:07 crc kubenswrapper[4941]: I1130 06:51:07.786819 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Nov 30 06:51:08 crc kubenswrapper[4941]: I1130 06:51:08.167998 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Nov 30 06:51:08 crc kubenswrapper[4941]: I1130 06:51:08.317707 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 30 06:51:08 crc kubenswrapper[4941]: I1130 06:51:08.542959 4941 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console-operator"/"trusted-ca" Nov 30 06:51:08 crc kubenswrapper[4941]: I1130 06:51:08.719665 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 30 06:51:12 crc kubenswrapper[4941]: I1130 06:51:12.965452 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sx2h8"] Nov 30 06:51:12 crc kubenswrapper[4941]: I1130 06:51:12.966634 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sx2h8" podUID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" containerName="registry-server" containerID="cri-o://87b80a5c3bd051ce58a229189fb4895666e23c1d08da9a9c04fc4eed7f7f546c" gracePeriod=30 Nov 30 06:51:12 crc kubenswrapper[4941]: I1130 06:51:12.971644 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h5dr7"] Nov 30 06:51:12 crc kubenswrapper[4941]: I1130 06:51:12.972052 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h5dr7" podUID="295ff105-17f2-40dc-96ab-8cc8b03031af" containerName="registry-server" containerID="cri-o://e200e68ea283cf309f8e679e749c6679f254b837656fc124799d8ac6d43377dc" gracePeriod=30 Nov 30 06:51:12 crc kubenswrapper[4941]: I1130 06:51:12.993039 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vw95c"] Nov 30 06:51:12 crc kubenswrapper[4941]: I1130 06:51:12.993701 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" podUID="43fead9f-0d7a-4d82-8822-b4e83849d4ad" containerName="marketplace-operator" containerID="cri-o://3c93e3a7fa54f0faf07ea224bc60eb39cfee08efabce356fb5fa279efcfce3cc" gracePeriod=30 Nov 30 06:51:12 crc kubenswrapper[4941]: I1130 06:51:12.999309 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9lzhs"] Nov 30 06:51:12 crc kubenswrapper[4941]: I1130 06:51:12.999625 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9lzhs" podUID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" containerName="registry-server" containerID="cri-o://12ca5df56c29b76a5e5ec03d49e4b764b1491bd2a534e652ccf96e577eb3309e" gracePeriod=30 Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.002519 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dzm96"] Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.003149 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dzm96" podUID="a44f18a4-b76a-48fa-b2e9-df18115674d3" containerName="registry-server" containerID="cri-o://a14a148a0d9ecb6f31969f95857c154a8e31150c40233fe7da4d1ecb0f1ab33d" gracePeriod=30 Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.007017 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hzvnb"] Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.007933 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.047169 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hzvnb"] Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.134311 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbbdg\" (UniqueName: \"kubernetes.io/projected/09c52383-5a1a-4a4a-a354-46da2eee2a39-kube-api-access-mbbdg\") pod \"marketplace-operator-79b997595-hzvnb\" (UID: \"09c52383-5a1a-4a4a-a354-46da2eee2a39\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.134502 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/09c52383-5a1a-4a4a-a354-46da2eee2a39-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hzvnb\" (UID: \"09c52383-5a1a-4a4a-a354-46da2eee2a39\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.134600 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/09c52383-5a1a-4a4a-a354-46da2eee2a39-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hzvnb\" (UID: \"09c52383-5a1a-4a4a-a354-46da2eee2a39\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.204245 4941 generic.go:334] "Generic (PLEG): container finished" podID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" containerID="87b80a5c3bd051ce58a229189fb4895666e23c1d08da9a9c04fc4eed7f7f546c" exitCode=0 Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.204801 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sx2h8" event={"ID":"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5","Type":"ContainerDied","Data":"87b80a5c3bd051ce58a229189fb4895666e23c1d08da9a9c04fc4eed7f7f546c"} Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.209400 4941 generic.go:334] "Generic (PLEG): container finished" podID="295ff105-17f2-40dc-96ab-8cc8b03031af" containerID="e200e68ea283cf309f8e679e749c6679f254b837656fc124799d8ac6d43377dc" exitCode=0 Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.209487 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5dr7" event={"ID":"295ff105-17f2-40dc-96ab-8cc8b03031af","Type":"ContainerDied","Data":"e200e68ea283cf309f8e679e749c6679f254b837656fc124799d8ac6d43377dc"} Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.212235 4941 generic.go:334] "Generic (PLEG): container finished" podID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" containerID="12ca5df56c29b76a5e5ec03d49e4b764b1491bd2a534e652ccf96e577eb3309e" exitCode=0 Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.212298 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9lzhs" event={"ID":"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f","Type":"ContainerDied","Data":"12ca5df56c29b76a5e5ec03d49e4b764b1491bd2a534e652ccf96e577eb3309e"} Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.213415 4941 generic.go:334] "Generic (PLEG): container finished" podID="43fead9f-0d7a-4d82-8822-b4e83849d4ad" 
containerID="3c93e3a7fa54f0faf07ea224bc60eb39cfee08efabce356fb5fa279efcfce3cc" exitCode=0 Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.213502 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" event={"ID":"43fead9f-0d7a-4d82-8822-b4e83849d4ad","Type":"ContainerDied","Data":"3c93e3a7fa54f0faf07ea224bc60eb39cfee08efabce356fb5fa279efcfce3cc"} Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.215579 4941 generic.go:334] "Generic (PLEG): container finished" podID="a44f18a4-b76a-48fa-b2e9-df18115674d3" containerID="a14a148a0d9ecb6f31969f95857c154a8e31150c40233fe7da4d1ecb0f1ab33d" exitCode=0 Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.215623 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzm96" event={"ID":"a44f18a4-b76a-48fa-b2e9-df18115674d3","Type":"ContainerDied","Data":"a14a148a0d9ecb6f31969f95857c154a8e31150c40233fe7da4d1ecb0f1ab33d"} Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.236602 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbbdg\" (UniqueName: \"kubernetes.io/projected/09c52383-5a1a-4a4a-a354-46da2eee2a39-kube-api-access-mbbdg\") pod \"marketplace-operator-79b997595-hzvnb\" (UID: \"09c52383-5a1a-4a4a-a354-46da2eee2a39\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.236685 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/09c52383-5a1a-4a4a-a354-46da2eee2a39-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hzvnb\" (UID: \"09c52383-5a1a-4a4a-a354-46da2eee2a39\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.236724 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/09c52383-5a1a-4a4a-a354-46da2eee2a39-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hzvnb\" (UID: \"09c52383-5a1a-4a4a-a354-46da2eee2a39\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.238397 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/09c52383-5a1a-4a4a-a354-46da2eee2a39-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hzvnb\" (UID: \"09c52383-5a1a-4a4a-a354-46da2eee2a39\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.252558 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/09c52383-5a1a-4a4a-a354-46da2eee2a39-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hzvnb\" (UID: \"09c52383-5a1a-4a4a-a354-46da2eee2a39\") " pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.260642 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbbdg\" (UniqueName: \"kubernetes.io/projected/09c52383-5a1a-4a4a-a354-46da2eee2a39-kube-api-access-mbbdg\") pod \"marketplace-operator-79b997595-hzvnb\" (UID: \"09c52383-5a1a-4a4a-a354-46da2eee2a39\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.340876 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.390932 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.395452 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.404716 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.426063 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.427024 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.443720 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgcf5\" (UniqueName: \"kubernetes.io/projected/43fead9f-0d7a-4d82-8822-b4e83849d4ad-kube-api-access-tgcf5\") pod \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.443777 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-operator-metrics\") pod \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.443811 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/295ff105-17f2-40dc-96ab-8cc8b03031af-catalog-content\") pod \"295ff105-17f2-40dc-96ab-8cc8b03031af\" (UID: \"295ff105-17f2-40dc-96ab-8cc8b03031af\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.443837 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-trusted-ca\") pod \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\" (UID: \"43fead9f-0d7a-4d82-8822-b4e83849d4ad\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.443856 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phgxc\" (UniqueName: \"kubernetes.io/projected/295ff105-17f2-40dc-96ab-8cc8b03031af-kube-api-access-phgxc\") pod \"295ff105-17f2-40dc-96ab-8cc8b03031af\" (UID: \"295ff105-17f2-40dc-96ab-8cc8b03031af\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.443891 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hvvf\" (UniqueName: \"kubernetes.io/projected/a44f18a4-b76a-48fa-b2e9-df18115674d3-kube-api-access-6hvvf\") pod \"a44f18a4-b76a-48fa-b2e9-df18115674d3\" (UID: \"a44f18a4-b76a-48fa-b2e9-df18115674d3\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.443915 4941 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-catalog-content\") pod \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\" (UID: \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.443934 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zmsw\" (UniqueName: \"kubernetes.io/projected/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-kube-api-access-9zmsw\") pod \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\" (UID: \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.443979 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5nsw\" (UniqueName: \"kubernetes.io/projected/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-kube-api-access-n5nsw\") pod \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\" (UID: \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.444008 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-utilities\") pod \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\" (UID: \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.444030 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-utilities\") pod \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\" (UID: \"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.444054 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/295ff105-17f2-40dc-96ab-8cc8b03031af-utilities\") pod \"295ff105-17f2-40dc-96ab-8cc8b03031af\" (UID: \"295ff105-17f2-40dc-96ab-8cc8b03031af\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.444075 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a44f18a4-b76a-48fa-b2e9-df18115674d3-utilities\") pod \"a44f18a4-b76a-48fa-b2e9-df18115674d3\" (UID: \"a44f18a4-b76a-48fa-b2e9-df18115674d3\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.444095 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-catalog-content\") pod \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\" (UID: \"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.444117 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a44f18a4-b76a-48fa-b2e9-df18115674d3-catalog-content\") pod \"a44f18a4-b76a-48fa-b2e9-df18115674d3\" (UID: \"a44f18a4-b76a-48fa-b2e9-df18115674d3\") " Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.445423 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "43fead9f-0d7a-4d82-8822-b4e83849d4ad" (UID: "43fead9f-0d7a-4d82-8822-b4e83849d4ad"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.447076 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-utilities" (OuterVolumeSpecName: "utilities") pod "7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" (UID: "7647c27f-b440-4d1c-8e6f-ffa56aa08a3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.447665 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a44f18a4-b76a-48fa-b2e9-df18115674d3-utilities" (OuterVolumeSpecName: "utilities") pod "a44f18a4-b76a-48fa-b2e9-df18115674d3" (UID: "a44f18a4-b76a-48fa-b2e9-df18115674d3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.450272 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/295ff105-17f2-40dc-96ab-8cc8b03031af-utilities" (OuterVolumeSpecName: "utilities") pod "295ff105-17f2-40dc-96ab-8cc8b03031af" (UID: "295ff105-17f2-40dc-96ab-8cc8b03031af"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.451594 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-kube-api-access-n5nsw" (OuterVolumeSpecName: "kube-api-access-n5nsw") pod "4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" (UID: "4348bd9e-d48d-41a7-9cbc-14620a7a8aa5"). InnerVolumeSpecName "kube-api-access-n5nsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.451638 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-kube-api-access-9zmsw" (OuterVolumeSpecName: "kube-api-access-9zmsw") pod "7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" (UID: "7647c27f-b440-4d1c-8e6f-ffa56aa08a3f"). InnerVolumeSpecName "kube-api-access-9zmsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.451644 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "43fead9f-0d7a-4d82-8822-b4e83849d4ad" (UID: "43fead9f-0d7a-4d82-8822-b4e83849d4ad"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.451738 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/295ff105-17f2-40dc-96ab-8cc8b03031af-kube-api-access-phgxc" (OuterVolumeSpecName: "kube-api-access-phgxc") pod "295ff105-17f2-40dc-96ab-8cc8b03031af" (UID: "295ff105-17f2-40dc-96ab-8cc8b03031af"). InnerVolumeSpecName "kube-api-access-phgxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.452186 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-utilities" (OuterVolumeSpecName: "utilities") pod "4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" (UID: "4348bd9e-d48d-41a7-9cbc-14620a7a8aa5"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.453735 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43fead9f-0d7a-4d82-8822-b4e83849d4ad-kube-api-access-tgcf5" (OuterVolumeSpecName: "kube-api-access-tgcf5") pod "43fead9f-0d7a-4d82-8822-b4e83849d4ad" (UID: "43fead9f-0d7a-4d82-8822-b4e83849d4ad"). InnerVolumeSpecName "kube-api-access-tgcf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.473687 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" (UID: "7647c27f-b440-4d1c-8e6f-ffa56aa08a3f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.495158 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a44f18a4-b76a-48fa-b2e9-df18115674d3-kube-api-access-6hvvf" (OuterVolumeSpecName: "kube-api-access-6hvvf") pod "a44f18a4-b76a-48fa-b2e9-df18115674d3" (UID: "a44f18a4-b76a-48fa-b2e9-df18115674d3"). InnerVolumeSpecName "kube-api-access-6hvvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.539378 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" (UID: "4348bd9e-d48d-41a7-9cbc-14620a7a8aa5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550490 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a44f18a4-b76a-48fa-b2e9-df18115674d3-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550564 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/295ff105-17f2-40dc-96ab-8cc8b03031af-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550641 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550693 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgcf5\" (UniqueName: \"kubernetes.io/projected/43fead9f-0d7a-4d82-8822-b4e83849d4ad-kube-api-access-tgcf5\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550713 4941 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550727 4941 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/43fead9f-0d7a-4d82-8822-b4e83849d4ad-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550740 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phgxc\" (UniqueName: \"kubernetes.io/projected/295ff105-17f2-40dc-96ab-8cc8b03031af-kube-api-access-phgxc\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550760 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550776 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hvvf\" (UniqueName: \"kubernetes.io/projected/a44f18a4-b76a-48fa-b2e9-df18115674d3-kube-api-access-6hvvf\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550789 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zmsw\" (UniqueName: \"kubernetes.io/projected/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-kube-api-access-9zmsw\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550805 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5nsw\" (UniqueName: \"kubernetes.io/projected/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-kube-api-access-n5nsw\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550826 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.550839 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.563023 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/295ff105-17f2-40dc-96ab-8cc8b03031af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "295ff105-17f2-40dc-96ab-8cc8b03031af" (UID: "295ff105-17f2-40dc-96ab-8cc8b03031af"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.624787 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a44f18a4-b76a-48fa-b2e9-df18115674d3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a44f18a4-b76a-48fa-b2e9-df18115674d3" (UID: "a44f18a4-b76a-48fa-b2e9-df18115674d3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.652911 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a44f18a4-b76a-48fa-b2e9-df18115674d3-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.653175 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/295ff105-17f2-40dc-96ab-8cc8b03031af-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:13 crc kubenswrapper[4941]: I1130 06:51:13.786026 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hzvnb"] Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.225824 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h5dr7" event={"ID":"295ff105-17f2-40dc-96ab-8cc8b03031af","Type":"ContainerDied","Data":"b19befbc3c0b5fac8c31a702e63e6729a24798bbf3567793e0a8bce887d297d6"} Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.225893 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h5dr7" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.225913 4941 scope.go:117] "RemoveContainer" containerID="e200e68ea283cf309f8e679e749c6679f254b837656fc124799d8ac6d43377dc" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.229305 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9lzhs" event={"ID":"7647c27f-b440-4d1c-8e6f-ffa56aa08a3f","Type":"ContainerDied","Data":"27f7fbaa065bbd7f39f85e4ee5e98c3468595e973206ffea8530006bdcf9e6f5"} Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.229387 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9lzhs" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.233222 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" event={"ID":"43fead9f-0d7a-4d82-8822-b4e83849d4ad","Type":"ContainerDied","Data":"4e69c64019afdf1464a32e19bea8f0890eb9678ee1a4c5718ad754f33a9d00a4"} Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.233426 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vw95c" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.237832 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzm96" event={"ID":"a44f18a4-b76a-48fa-b2e9-df18115674d3","Type":"ContainerDied","Data":"138568fa5675ad5235be2cd91904159aca59671b617ce3e54128341b4e96a8e8"} Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.237842 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dzm96" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.239709 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" event={"ID":"09c52383-5a1a-4a4a-a354-46da2eee2a39","Type":"ContainerStarted","Data":"f8b29d8a4239098aa2d10293e7e105dcce208942bfc0e2dd503bbc8fc9ddd4ca"} Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.239768 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" event={"ID":"09c52383-5a1a-4a4a-a354-46da2eee2a39","Type":"ContainerStarted","Data":"05b583ce8abf22f2a3fb9c677c84f5339342550a71aac367e731df2ab97e40bd"} Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.240897 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.242722 4941 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hzvnb container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" start-of-body= Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.242788 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" podUID="09c52383-5a1a-4a4a-a354-46da2eee2a39" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.251605 4941 scope.go:117] "RemoveContainer" containerID="a5c21cf456e6e7f08dbd41abfcedcde72124061b1b1f53702b1f5227fc62972c" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.255437 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sx2h8" event={"ID":"4348bd9e-d48d-41a7-9cbc-14620a7a8aa5","Type":"ContainerDied","Data":"cc991cdbb0df7046a80133cc78023f37f982bb527899296ca7689a37cff581bb"} Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.255569 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sx2h8" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.268219 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" podStartSLOduration=2.268199197 podStartE2EDuration="2.268199197s" podCreationTimestamp="2025-11-30 06:51:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:51:14.263153624 +0000 UTC m=+295.031325233" watchObservedRunningTime="2025-11-30 06:51:14.268199197 +0000 UTC m=+295.036370806" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.279142 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9lzhs"] Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.282377 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9lzhs"] Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.286634 4941 scope.go:117] "RemoveContainer" containerID="90993c53316a955956df4dff7b571c70236e1769cd3f0b34a43e4e08cca35bb2" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.292571 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vw95c"] Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.299981 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vw95c"] Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.306350 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h5dr7"] Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.309119 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h5dr7"] Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.312638 4941 scope.go:117] "RemoveContainer" containerID="12ca5df56c29b76a5e5ec03d49e4b764b1491bd2a534e652ccf96e577eb3309e" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.312769 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sx2h8"] Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.316860 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sx2h8"] Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.326037 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dzm96"] Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.329176 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dzm96"] Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.336998 4941 scope.go:117] "RemoveContainer" containerID="73fe24fbd8f5e58dd2bbae8801b8af563f5821589e505fedda35047d9321b609" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.353875 4941 scope.go:117] "RemoveContainer" containerID="ec3c93357795ff7d7e1fa7e47dc4e2a6285e98a4c5bea560fda93297d71a6f09" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.371605 4941 scope.go:117] "RemoveContainer" containerID="3c93e3a7fa54f0faf07ea224bc60eb39cfee08efabce356fb5fa279efcfce3cc" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.391096 4941 scope.go:117] "RemoveContainer" containerID="a14a148a0d9ecb6f31969f95857c154a8e31150c40233fe7da4d1ecb0f1ab33d" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.407912 4941 
scope.go:117] "RemoveContainer" containerID="f0d479c8a44b939d4fd3da93eb63a389352fe3de40c96e36ae4609d7aa67ecd3" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.426803 4941 scope.go:117] "RemoveContainer" containerID="d01abf5371fe6bacbd9b4abaab16e6589f34dc3cbcba07e0ba6066346b9ba6ed" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.443651 4941 scope.go:117] "RemoveContainer" containerID="87b80a5c3bd051ce58a229189fb4895666e23c1d08da9a9c04fc4eed7f7f546c" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.465784 4941 scope.go:117] "RemoveContainer" containerID="68f052336cd4e1caeb5ab4a67f41e6997c0283d7f947d52cb912f68798d63725" Nov 30 06:51:14 crc kubenswrapper[4941]: I1130 06:51:14.488026 4941 scope.go:117] "RemoveContainer" containerID="284bce6c4dfec5a10c835d6bf079b749657214e484f5cf82bf410c2243c2af21" Nov 30 06:51:15 crc kubenswrapper[4941]: I1130 06:51:15.272224 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hzvnb" Nov 30 06:51:15 crc kubenswrapper[4941]: I1130 06:51:15.530807 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="295ff105-17f2-40dc-96ab-8cc8b03031af" path="/var/lib/kubelet/pods/295ff105-17f2-40dc-96ab-8cc8b03031af/volumes" Nov 30 06:51:15 crc kubenswrapper[4941]: I1130 06:51:15.531491 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" path="/var/lib/kubelet/pods/4348bd9e-d48d-41a7-9cbc-14620a7a8aa5/volumes" Nov 30 06:51:15 crc kubenswrapper[4941]: I1130 06:51:15.532085 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43fead9f-0d7a-4d82-8822-b4e83849d4ad" path="/var/lib/kubelet/pods/43fead9f-0d7a-4d82-8822-b4e83849d4ad/volumes" Nov 30 06:51:15 crc kubenswrapper[4941]: I1130 06:51:15.532951 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" path="/var/lib/kubelet/pods/7647c27f-b440-4d1c-8e6f-ffa56aa08a3f/volumes" Nov 30 06:51:15 crc kubenswrapper[4941]: I1130 06:51:15.533542 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a44f18a4-b76a-48fa-b2e9-df18115674d3" path="/var/lib/kubelet/pods/a44f18a4-b76a-48fa-b2e9-df18115674d3/volumes" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.452231 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vhvpc"] Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.452980 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" podUID="843edada-6eb9-46da-ba98-05ccfcd4cb1b" containerName="controller-manager" containerID="cri-o://3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f" gracePeriod=30 Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.550539 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd"] Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.551064 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" podUID="5ccfee3c-1083-4a51-a25b-4678f31d3a51" containerName="route-controller-manager" containerID="cri-o://f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec" gracePeriod=30 Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.787186 4941 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.876843 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.914262 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-proxy-ca-bundles\") pod \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.914349 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6krj\" (UniqueName: \"kubernetes.io/projected/843edada-6eb9-46da-ba98-05ccfcd4cb1b-kube-api-access-x6krj\") pod \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.914386 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-client-ca\") pod \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.914412 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/843edada-6eb9-46da-ba98-05ccfcd4cb1b-serving-cert\") pod \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.914458 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-config\") pod \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\" (UID: \"843edada-6eb9-46da-ba98-05ccfcd4cb1b\") " Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.915244 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "843edada-6eb9-46da-ba98-05ccfcd4cb1b" (UID: "843edada-6eb9-46da-ba98-05ccfcd4cb1b"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.915269 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-config" (OuterVolumeSpecName: "config") pod "843edada-6eb9-46da-ba98-05ccfcd4cb1b" (UID: "843edada-6eb9-46da-ba98-05ccfcd4cb1b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.915541 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-client-ca" (OuterVolumeSpecName: "client-ca") pod "843edada-6eb9-46da-ba98-05ccfcd4cb1b" (UID: "843edada-6eb9-46da-ba98-05ccfcd4cb1b"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.921361 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/843edada-6eb9-46da-ba98-05ccfcd4cb1b-kube-api-access-x6krj" (OuterVolumeSpecName: "kube-api-access-x6krj") pod "843edada-6eb9-46da-ba98-05ccfcd4cb1b" (UID: "843edada-6eb9-46da-ba98-05ccfcd4cb1b"). InnerVolumeSpecName "kube-api-access-x6krj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.926289 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/843edada-6eb9-46da-ba98-05ccfcd4cb1b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "843edada-6eb9-46da-ba98-05ccfcd4cb1b" (UID: "843edada-6eb9-46da-ba98-05ccfcd4cb1b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987142 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-696c56d59-46544"] Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987340 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" containerName="extract-utilities" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987352 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" containerName="extract-utilities" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987360 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="295ff105-17f2-40dc-96ab-8cc8b03031af" containerName="extract-utilities" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987366 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="295ff105-17f2-40dc-96ab-8cc8b03031af" containerName="extract-utilities" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987375 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a44f18a4-b76a-48fa-b2e9-df18115674d3" containerName="registry-server" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987381 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a44f18a4-b76a-48fa-b2e9-df18115674d3" containerName="registry-server" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987389 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ccfee3c-1083-4a51-a25b-4678f31d3a51" containerName="route-controller-manager" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987395 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ccfee3c-1083-4a51-a25b-4678f31d3a51" containerName="route-controller-manager" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987403 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" containerName="extract-utilities" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987409 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" containerName="extract-utilities" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987416 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" containerName="registry-server" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987422 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" containerName="registry-server" Nov 30 
06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987431 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" containerName="extract-content" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987437 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" containerName="extract-content" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987444 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a44f18a4-b76a-48fa-b2e9-df18115674d3" containerName="extract-content" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987449 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a44f18a4-b76a-48fa-b2e9-df18115674d3" containerName="extract-content" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987457 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="295ff105-17f2-40dc-96ab-8cc8b03031af" containerName="extract-content" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987463 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="295ff105-17f2-40dc-96ab-8cc8b03031af" containerName="extract-content" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987472 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a44f18a4-b76a-48fa-b2e9-df18115674d3" containerName="extract-utilities" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987481 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a44f18a4-b76a-48fa-b2e9-df18115674d3" containerName="extract-utilities" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987490 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="295ff105-17f2-40dc-96ab-8cc8b03031af" containerName="registry-server" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987496 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="295ff105-17f2-40dc-96ab-8cc8b03031af" containerName="registry-server" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987505 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="843edada-6eb9-46da-ba98-05ccfcd4cb1b" containerName="controller-manager" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987511 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="843edada-6eb9-46da-ba98-05ccfcd4cb1b" containerName="controller-manager" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987519 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" containerName="extract-content" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987525 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" containerName="extract-content" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987532 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" containerName="registry-server" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987538 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" containerName="registry-server" Nov 30 06:51:32 crc kubenswrapper[4941]: E1130 06:51:32.987545 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43fead9f-0d7a-4d82-8822-b4e83849d4ad" containerName="marketplace-operator" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987552 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="43fead9f-0d7a-4d82-8822-b4e83849d4ad" 
containerName="marketplace-operator" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987641 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="295ff105-17f2-40dc-96ab-8cc8b03031af" containerName="registry-server" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987652 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ccfee3c-1083-4a51-a25b-4678f31d3a51" containerName="route-controller-manager" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987660 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7647c27f-b440-4d1c-8e6f-ffa56aa08a3f" containerName="registry-server" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987670 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a44f18a4-b76a-48fa-b2e9-df18115674d3" containerName="registry-server" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987681 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="843edada-6eb9-46da-ba98-05ccfcd4cb1b" containerName="controller-manager" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987687 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4348bd9e-d48d-41a7-9cbc-14620a7a8aa5" containerName="registry-server" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.987693 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="43fead9f-0d7a-4d82-8822-b4e83849d4ad" containerName="marketplace-operator" Nov 30 06:51:32 crc kubenswrapper[4941]: I1130 06:51:32.988034 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.005515 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj"] Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.006093 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.015735 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ccfee3c-1083-4a51-a25b-4678f31d3a51-config\") pod \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.015821 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24xhs\" (UniqueName: \"kubernetes.io/projected/5ccfee3c-1083-4a51-a25b-4678f31d3a51-kube-api-access-24xhs\") pod \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.015897 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ccfee3c-1083-4a51-a25b-4678f31d3a51-serving-cert\") pod \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.016060 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ccfee3c-1083-4a51-a25b-4678f31d3a51-client-ca\") pod \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\" (UID: \"5ccfee3c-1083-4a51-a25b-4678f31d3a51\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.016464 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ccfee3c-1083-4a51-a25b-4678f31d3a51-config" (OuterVolumeSpecName: "config") pod "5ccfee3c-1083-4a51-a25b-4678f31d3a51" (UID: "5ccfee3c-1083-4a51-a25b-4678f31d3a51"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.016508 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ccfee3c-1083-4a51-a25b-4678f31d3a51-client-ca" (OuterVolumeSpecName: "client-ca") pod "5ccfee3c-1083-4a51-a25b-4678f31d3a51" (UID: "5ccfee3c-1083-4a51-a25b-4678f31d3a51"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.016750 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ccfee3c-1083-4a51-a25b-4678f31d3a51-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.016857 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/843edada-6eb9-46da-ba98-05ccfcd4cb1b-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.016892 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.016926 4941 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ccfee3c-1083-4a51-a25b-4678f31d3a51-client-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.016954 4941 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.016990 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6krj\" (UniqueName: \"kubernetes.io/projected/843edada-6eb9-46da-ba98-05ccfcd4cb1b-kube-api-access-x6krj\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.017062 4941 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/843edada-6eb9-46da-ba98-05ccfcd4cb1b-client-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.018841 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ccfee3c-1083-4a51-a25b-4678f31d3a51-kube-api-access-24xhs" (OuterVolumeSpecName: "kube-api-access-24xhs") pod "5ccfee3c-1083-4a51-a25b-4678f31d3a51" (UID: "5ccfee3c-1083-4a51-a25b-4678f31d3a51"). InnerVolumeSpecName "kube-api-access-24xhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.019709 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ccfee3c-1083-4a51-a25b-4678f31d3a51-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5ccfee3c-1083-4a51-a25b-4678f31d3a51" (UID: "5ccfee3c-1083-4a51-a25b-4678f31d3a51"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.037109 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-696c56d59-46544"] Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.047801 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj"] Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.085690 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-696c56d59-46544"] Nov 30 06:51:33 crc kubenswrapper[4941]: E1130 06:51:33.086034 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[client-ca config kube-api-access-jvrsx proxy-ca-bundles serving-cert], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openshift-controller-manager/controller-manager-696c56d59-46544" podUID="daaff2e4-969a-4e8b-81e2-870b713c45af" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.096939 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj"] Nov 30 06:51:33 crc kubenswrapper[4941]: E1130 06:51:33.097218 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[client-ca config kube-api-access-j7mpp serving-cert], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" podUID="d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.118080 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-client-ca\") pod \"route-controller-manager-768ccd95d9-g6dvj\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.118125 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-serving-cert\") pod \"route-controller-manager-768ccd95d9-g6dvj\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.118143 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7mpp\" (UniqueName: \"kubernetes.io/projected/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-kube-api-access-j7mpp\") pod \"route-controller-manager-768ccd95d9-g6dvj\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.118166 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvrsx\" (UniqueName: \"kubernetes.io/projected/daaff2e4-969a-4e8b-81e2-870b713c45af-kube-api-access-jvrsx\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.118280 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-config\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.118387 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/daaff2e4-969a-4e8b-81e2-870b713c45af-serving-cert\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.118699 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-config\") pod \"route-controller-manager-768ccd95d9-g6dvj\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.118787 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-client-ca\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.118823 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-proxy-ca-bundles\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.118911 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24xhs\" (UniqueName: \"kubernetes.io/projected/5ccfee3c-1083-4a51-a25b-4678f31d3a51-kube-api-access-24xhs\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.118928 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ccfee3c-1083-4a51-a25b-4678f31d3a51-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.219703 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-config\") pod \"route-controller-manager-768ccd95d9-g6dvj\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.219757 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-client-ca\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.219778 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-proxy-ca-bundles\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.219811 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-client-ca\") pod \"route-controller-manager-768ccd95d9-g6dvj\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.219831 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-serving-cert\") pod \"route-controller-manager-768ccd95d9-g6dvj\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.219850 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7mpp\" (UniqueName: \"kubernetes.io/projected/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-kube-api-access-j7mpp\") pod \"route-controller-manager-768ccd95d9-g6dvj\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.219874 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvrsx\" (UniqueName: \"kubernetes.io/projected/daaff2e4-969a-4e8b-81e2-870b713c45af-kube-api-access-jvrsx\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.219903 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-config\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.219928 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/daaff2e4-969a-4e8b-81e2-870b713c45af-serving-cert\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.220875 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-client-ca\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.220961 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-client-ca\") pod 
\"route-controller-manager-768ccd95d9-g6dvj\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.221249 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-proxy-ca-bundles\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.221285 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-config\") pod \"route-controller-manager-768ccd95d9-g6dvj\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.221538 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-config\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.223094 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/daaff2e4-969a-4e8b-81e2-870b713c45af-serving-cert\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.223520 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-serving-cert\") pod \"route-controller-manager-768ccd95d9-g6dvj\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.233926 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7mpp\" (UniqueName: \"kubernetes.io/projected/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-kube-api-access-j7mpp\") pod \"route-controller-manager-768ccd95d9-g6dvj\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.235284 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvrsx\" (UniqueName: \"kubernetes.io/projected/daaff2e4-969a-4e8b-81e2-870b713c45af-kube-api-access-jvrsx\") pod \"controller-manager-696c56d59-46544\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.388862 4941 generic.go:334] "Generic (PLEG): container finished" podID="5ccfee3c-1083-4a51-a25b-4678f31d3a51" containerID="f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec" exitCode=0 Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.388976 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.388976 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" event={"ID":"5ccfee3c-1083-4a51-a25b-4678f31d3a51","Type":"ContainerDied","Data":"f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec"} Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.389251 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd" event={"ID":"5ccfee3c-1083-4a51-a25b-4678f31d3a51","Type":"ContainerDied","Data":"54d3129817b120dcdada567847544f49d11cccf08094a0861e091c57fb8d9212"} Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.389294 4941 scope.go:117] "RemoveContainer" containerID="f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.391418 4941 generic.go:334] "Generic (PLEG): container finished" podID="843edada-6eb9-46da-ba98-05ccfcd4cb1b" containerID="3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f" exitCode=0 Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.391493 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.391537 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" event={"ID":"843edada-6eb9-46da-ba98-05ccfcd4cb1b","Type":"ContainerDied","Data":"3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f"} Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.391569 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" event={"ID":"843edada-6eb9-46da-ba98-05ccfcd4cb1b","Type":"ContainerDied","Data":"7484892378b3e479b528319d4f480cce493c6115f46546e4792b63914081df53"} Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.391749 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vhvpc" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.391961 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.401176 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.410677 4941 scope.go:117] "RemoveContainer" containerID="f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec" Nov 30 06:51:33 crc kubenswrapper[4941]: E1130 06:51:33.411161 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec\": container with ID starting with f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec not found: ID does not exist" containerID="f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.411252 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec"} err="failed to get container status \"f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec\": rpc error: code = NotFound desc = could not find container \"f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec\": container with ID starting with f08a3d661a1728d039ead64cad828ffb64307eb3a8344c4b0c541900bf47adec not found: ID does not exist" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.411358 4941 scope.go:117] "RemoveContainer" containerID="3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.412831 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.425237 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vhvpc"] Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.430974 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vhvpc"] Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.431358 4941 scope.go:117] "RemoveContainer" containerID="3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f" Nov 30 06:51:33 crc kubenswrapper[4941]: E1130 06:51:33.432210 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f\": container with ID starting with 3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f not found: ID does not exist" containerID="3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.432243 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f"} err="failed to get container status \"3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f\": rpc error: code = NotFound desc = could not find container \"3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f\": container with ID starting with 3529d6c42f991e36c06071721673e55022a3239cade098c0ff7086977916971f not found: ID does not exist" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.435388 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd"] Nov 
30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.439488 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hzwfd"] Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.524918 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-proxy-ca-bundles\") pod \"daaff2e4-969a-4e8b-81e2-870b713c45af\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.525366 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "daaff2e4-969a-4e8b-81e2-870b713c45af" (UID: "daaff2e4-969a-4e8b-81e2-870b713c45af"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.526392 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-client-ca\") pod \"daaff2e4-969a-4e8b-81e2-870b713c45af\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.526553 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvrsx\" (UniqueName: \"kubernetes.io/projected/daaff2e4-969a-4e8b-81e2-870b713c45af-kube-api-access-jvrsx\") pod \"daaff2e4-969a-4e8b-81e2-870b713c45af\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.527097 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-client-ca" (OuterVolumeSpecName: "client-ca") pod "daaff2e4-969a-4e8b-81e2-870b713c45af" (UID: "daaff2e4-969a-4e8b-81e2-870b713c45af"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.527450 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-config\") pod \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.527506 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-serving-cert\") pod \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.527549 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-client-ca\") pod \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.527596 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7mpp\" (UniqueName: \"kubernetes.io/projected/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-kube-api-access-j7mpp\") pod \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\" (UID: \"d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.527636 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-config\") pod \"daaff2e4-969a-4e8b-81e2-870b713c45af\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.527707 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/daaff2e4-969a-4e8b-81e2-870b713c45af-serving-cert\") pod \"daaff2e4-969a-4e8b-81e2-870b713c45af\" (UID: \"daaff2e4-969a-4e8b-81e2-870b713c45af\") " Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.528200 4941 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.528236 4941 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-client-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.528272 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-client-ca" (OuterVolumeSpecName: "client-ca") pod "d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803" (UID: "d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.528308 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-config" (OuterVolumeSpecName: "config") pod "d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803" (UID: "d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.529062 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-config" (OuterVolumeSpecName: "config") pod "daaff2e4-969a-4e8b-81e2-870b713c45af" (UID: "daaff2e4-969a-4e8b-81e2-870b713c45af"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.531079 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daaff2e4-969a-4e8b-81e2-870b713c45af-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "daaff2e4-969a-4e8b-81e2-870b713c45af" (UID: "daaff2e4-969a-4e8b-81e2-870b713c45af"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.531275 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803" (UID: "d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.531379 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-kube-api-access-j7mpp" (OuterVolumeSpecName: "kube-api-access-j7mpp") pod "d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803" (UID: "d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803"). InnerVolumeSpecName "kube-api-access-j7mpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.532548 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daaff2e4-969a-4e8b-81e2-870b713c45af-kube-api-access-jvrsx" (OuterVolumeSpecName: "kube-api-access-jvrsx") pod "daaff2e4-969a-4e8b-81e2-870b713c45af" (UID: "daaff2e4-969a-4e8b-81e2-870b713c45af"). InnerVolumeSpecName "kube-api-access-jvrsx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.535207 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ccfee3c-1083-4a51-a25b-4678f31d3a51" path="/var/lib/kubelet/pods/5ccfee3c-1083-4a51-a25b-4678f31d3a51/volumes" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.535937 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="843edada-6eb9-46da-ba98-05ccfcd4cb1b" path="/var/lib/kubelet/pods/843edada-6eb9-46da-ba98-05ccfcd4cb1b/volumes" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.629818 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvrsx\" (UniqueName: \"kubernetes.io/projected/daaff2e4-969a-4e8b-81e2-870b713c45af-kube-api-access-jvrsx\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.629858 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.629868 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.629877 4941 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-client-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.629890 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7mpp\" (UniqueName: \"kubernetes.io/projected/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803-kube-api-access-j7mpp\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.629898 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daaff2e4-969a-4e8b-81e2-870b713c45af-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:33 crc kubenswrapper[4941]: I1130 06:51:33.629909 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/daaff2e4-969a-4e8b-81e2-870b713c45af-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.398377 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-696c56d59-46544" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.398399 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.430855 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5947bdc4b4-hb26p"] Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.431615 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.450211 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.450598 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.450669 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.450743 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.451546 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.458253 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.464828 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-696c56d59-46544"] Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.470960 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-696c56d59-46544"] Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.474677 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5947bdc4b4-hb26p"] Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.478261 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.481363 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj"] Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.482692 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-768ccd95d9-g6dvj"] Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.541490 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdv9q\" (UniqueName: \"kubernetes.io/projected/3a398792-576d-4f38-afe8-7d567a69727b-kube-api-access-tdv9q\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.542371 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-client-ca\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.542494 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-config\") pod \"controller-manager-5947bdc4b4-hb26p\" 
(UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.542576 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a398792-576d-4f38-afe8-7d567a69727b-serving-cert\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.542609 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-proxy-ca-bundles\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.643312 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-client-ca\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.643591 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-config\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.643703 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a398792-576d-4f38-afe8-7d567a69727b-serving-cert\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.643780 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-proxy-ca-bundles\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.643865 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdv9q\" (UniqueName: \"kubernetes.io/projected/3a398792-576d-4f38-afe8-7d567a69727b-kube-api-access-tdv9q\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.644367 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-client-ca\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 
06:51:34.645216 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-proxy-ca-bundles\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.645649 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-config\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.647172 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a398792-576d-4f38-afe8-7d567a69727b-serving-cert\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.661694 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdv9q\" (UniqueName: \"kubernetes.io/projected/3a398792-576d-4f38-afe8-7d567a69727b-kube-api-access-tdv9q\") pod \"controller-manager-5947bdc4b4-hb26p\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.770341 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:34 crc kubenswrapper[4941]: I1130 06:51:34.989880 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5947bdc4b4-hb26p"] Nov 30 06:51:35 crc kubenswrapper[4941]: I1130 06:51:35.408978 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" event={"ID":"3a398792-576d-4f38-afe8-7d567a69727b","Type":"ContainerStarted","Data":"762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18"} Nov 30 06:51:35 crc kubenswrapper[4941]: I1130 06:51:35.409376 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" event={"ID":"3a398792-576d-4f38-afe8-7d567a69727b","Type":"ContainerStarted","Data":"8807da1d6762c807ba8dc4c66ce583611dd3a62a8904c67a2c891db67128103c"} Nov 30 06:51:35 crc kubenswrapper[4941]: I1130 06:51:35.409400 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:35 crc kubenswrapper[4941]: I1130 06:51:35.414146 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:35 crc kubenswrapper[4941]: I1130 06:51:35.427350 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" podStartSLOduration=2.427318113 podStartE2EDuration="2.427318113s" podCreationTimestamp="2025-11-30 06:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-11-30 06:51:35.426453754 +0000 UTC m=+316.194625383" watchObservedRunningTime="2025-11-30 06:51:35.427318113 +0000 UTC m=+316.195489722" Nov 30 06:51:35 crc kubenswrapper[4941]: I1130 06:51:35.528390 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803" path="/var/lib/kubelet/pods/d9e3a0f9-bc67-4ef1-8a78-5ed93b3df803/volumes" Nov 30 06:51:35 crc kubenswrapper[4941]: I1130 06:51:35.528812 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daaff2e4-969a-4e8b-81e2-870b713c45af" path="/var/lib/kubelet/pods/daaff2e4-969a-4e8b-81e2-870b713c45af/volumes" Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.947276 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz"] Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.948262 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.951526 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.951525 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.951776 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.951942 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.952434 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.957449 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.962855 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz"] Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.980272 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e58632c-642b-4077-8051-80405f3c0a7a-config\") pod \"route-controller-manager-8596447d46-7hxbz\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.980335 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e58632c-642b-4077-8051-80405f3c0a7a-client-ca\") pod \"route-controller-manager-8596447d46-7hxbz\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.980373 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1e58632c-642b-4077-8051-80405f3c0a7a-serving-cert\") pod \"route-controller-manager-8596447d46-7hxbz\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:36 crc kubenswrapper[4941]: I1130 06:51:36.980451 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vf2rk\" (UniqueName: \"kubernetes.io/projected/1e58632c-642b-4077-8051-80405f3c0a7a-kube-api-access-vf2rk\") pod \"route-controller-manager-8596447d46-7hxbz\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:37 crc kubenswrapper[4941]: I1130 06:51:37.081092 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e58632c-642b-4077-8051-80405f3c0a7a-config\") pod \"route-controller-manager-8596447d46-7hxbz\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:37 crc kubenswrapper[4941]: I1130 06:51:37.081144 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e58632c-642b-4077-8051-80405f3c0a7a-client-ca\") pod \"route-controller-manager-8596447d46-7hxbz\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:37 crc kubenswrapper[4941]: I1130 06:51:37.081173 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e58632c-642b-4077-8051-80405f3c0a7a-serving-cert\") pod \"route-controller-manager-8596447d46-7hxbz\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:37 crc kubenswrapper[4941]: I1130 06:51:37.081227 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vf2rk\" (UniqueName: \"kubernetes.io/projected/1e58632c-642b-4077-8051-80405f3c0a7a-kube-api-access-vf2rk\") pod \"route-controller-manager-8596447d46-7hxbz\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:37 crc kubenswrapper[4941]: I1130 06:51:37.082288 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e58632c-642b-4077-8051-80405f3c0a7a-client-ca\") pod \"route-controller-manager-8596447d46-7hxbz\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:37 crc kubenswrapper[4941]: I1130 06:51:37.083560 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e58632c-642b-4077-8051-80405f3c0a7a-config\") pod \"route-controller-manager-8596447d46-7hxbz\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:37 crc kubenswrapper[4941]: I1130 06:51:37.088063 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e58632c-642b-4077-8051-80405f3c0a7a-serving-cert\") 
pod \"route-controller-manager-8596447d46-7hxbz\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:37 crc kubenswrapper[4941]: I1130 06:51:37.096855 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vf2rk\" (UniqueName: \"kubernetes.io/projected/1e58632c-642b-4077-8051-80405f3c0a7a-kube-api-access-vf2rk\") pod \"route-controller-manager-8596447d46-7hxbz\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:37 crc kubenswrapper[4941]: I1130 06:51:37.279461 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:37 crc kubenswrapper[4941]: I1130 06:51:37.753996 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz"] Nov 30 06:51:37 crc kubenswrapper[4941]: W1130 06:51:37.756589 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e58632c_642b_4077_8051_80405f3c0a7a.slice/crio-e6013678fe054d0e59145805675761f5006a94d7bb1fd0a7a3cb273ff5dffb4d WatchSource:0}: Error finding container e6013678fe054d0e59145805675761f5006a94d7bb1fd0a7a3cb273ff5dffb4d: Status 404 returned error can't find the container with id e6013678fe054d0e59145805675761f5006a94d7bb1fd0a7a3cb273ff5dffb4d Nov 30 06:51:38 crc kubenswrapper[4941]: I1130 06:51:38.424461 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" event={"ID":"1e58632c-642b-4077-8051-80405f3c0a7a","Type":"ContainerStarted","Data":"ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd"} Nov 30 06:51:38 crc kubenswrapper[4941]: I1130 06:51:38.424745 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" event={"ID":"1e58632c-642b-4077-8051-80405f3c0a7a","Type":"ContainerStarted","Data":"e6013678fe054d0e59145805675761f5006a94d7bb1fd0a7a3cb273ff5dffb4d"} Nov 30 06:51:38 crc kubenswrapper[4941]: I1130 06:51:38.425932 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:38 crc kubenswrapper[4941]: I1130 06:51:38.433110 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:38 crc kubenswrapper[4941]: I1130 06:51:38.442548 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" podStartSLOduration=5.442530379 podStartE2EDuration="5.442530379s" podCreationTimestamp="2025-11-30 06:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:51:38.438965027 +0000 UTC m=+319.207136646" watchObservedRunningTime="2025-11-30 06:51:38.442530379 +0000 UTC m=+319.210701988" Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.467418 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5947bdc4b4-hb26p"] Nov 30 06:51:52 crc 
kubenswrapper[4941]: I1130 06:51:52.468140 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" podUID="3a398792-576d-4f38-afe8-7d567a69727b" containerName="controller-manager" containerID="cri-o://762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18" gracePeriod=30 Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.487627 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz"] Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.488100 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" podUID="1e58632c-642b-4077-8051-80405f3c0a7a" containerName="route-controller-manager" containerID="cri-o://ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd" gracePeriod=30 Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.964373 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.973414 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.995256 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e58632c-642b-4077-8051-80405f3c0a7a-serving-cert\") pod \"1e58632c-642b-4077-8051-80405f3c0a7a\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.995384 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e58632c-642b-4077-8051-80405f3c0a7a-client-ca\") pod \"1e58632c-642b-4077-8051-80405f3c0a7a\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.995518 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdv9q\" (UniqueName: \"kubernetes.io/projected/3a398792-576d-4f38-afe8-7d567a69727b-kube-api-access-tdv9q\") pod \"3a398792-576d-4f38-afe8-7d567a69727b\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.996145 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e58632c-642b-4077-8051-80405f3c0a7a-client-ca" (OuterVolumeSpecName: "client-ca") pod "1e58632c-642b-4077-8051-80405f3c0a7a" (UID: "1e58632c-642b-4077-8051-80405f3c0a7a"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.996775 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-client-ca\") pod \"3a398792-576d-4f38-afe8-7d567a69727b\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.996847 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e58632c-642b-4077-8051-80405f3c0a7a-config\") pod \"1e58632c-642b-4077-8051-80405f3c0a7a\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.996937 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vf2rk\" (UniqueName: \"kubernetes.io/projected/1e58632c-642b-4077-8051-80405f3c0a7a-kube-api-access-vf2rk\") pod \"1e58632c-642b-4077-8051-80405f3c0a7a\" (UID: \"1e58632c-642b-4077-8051-80405f3c0a7a\") " Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.996994 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-proxy-ca-bundles\") pod \"3a398792-576d-4f38-afe8-7d567a69727b\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.997155 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-config\") pod \"3a398792-576d-4f38-afe8-7d567a69727b\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.997188 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a398792-576d-4f38-afe8-7d567a69727b-serving-cert\") pod \"3a398792-576d-4f38-afe8-7d567a69727b\" (UID: \"3a398792-576d-4f38-afe8-7d567a69727b\") " Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.997279 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-client-ca" (OuterVolumeSpecName: "client-ca") pod "3a398792-576d-4f38-afe8-7d567a69727b" (UID: "3a398792-576d-4f38-afe8-7d567a69727b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.997575 4941 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-client-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.997612 4941 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1e58632c-642b-4077-8051-80405f3c0a7a-client-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.998169 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e58632c-642b-4077-8051-80405f3c0a7a-config" (OuterVolumeSpecName: "config") pod "1e58632c-642b-4077-8051-80405f3c0a7a" (UID: "1e58632c-642b-4077-8051-80405f3c0a7a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.998748 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "3a398792-576d-4f38-afe8-7d567a69727b" (UID: "3a398792-576d-4f38-afe8-7d567a69727b"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:52 crc kubenswrapper[4941]: I1130 06:51:52.999236 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-config" (OuterVolumeSpecName: "config") pod "3a398792-576d-4f38-afe8-7d567a69727b" (UID: "3a398792-576d-4f38-afe8-7d567a69727b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.002042 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a398792-576d-4f38-afe8-7d567a69727b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3a398792-576d-4f38-afe8-7d567a69727b" (UID: "3a398792-576d-4f38-afe8-7d567a69727b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.003466 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e58632c-642b-4077-8051-80405f3c0a7a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1e58632c-642b-4077-8051-80405f3c0a7a" (UID: "1e58632c-642b-4077-8051-80405f3c0a7a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.004373 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a398792-576d-4f38-afe8-7d567a69727b-kube-api-access-tdv9q" (OuterVolumeSpecName: "kube-api-access-tdv9q") pod "3a398792-576d-4f38-afe8-7d567a69727b" (UID: "3a398792-576d-4f38-afe8-7d567a69727b"). InnerVolumeSpecName "kube-api-access-tdv9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.007251 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e58632c-642b-4077-8051-80405f3c0a7a-kube-api-access-vf2rk" (OuterVolumeSpecName: "kube-api-access-vf2rk") pod "1e58632c-642b-4077-8051-80405f3c0a7a" (UID: "1e58632c-642b-4077-8051-80405f3c0a7a"). InnerVolumeSpecName "kube-api-access-vf2rk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.099007 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdv9q\" (UniqueName: \"kubernetes.io/projected/3a398792-576d-4f38-afe8-7d567a69727b-kube-api-access-tdv9q\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.099055 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e58632c-642b-4077-8051-80405f3c0a7a-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.099068 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vf2rk\" (UniqueName: \"kubernetes.io/projected/1e58632c-642b-4077-8051-80405f3c0a7a-kube-api-access-vf2rk\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.099080 4941 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.099091 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a398792-576d-4f38-afe8-7d567a69727b-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.099101 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3a398792-576d-4f38-afe8-7d567a69727b-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.099113 4941 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e58632c-642b-4077-8051-80405f3c0a7a-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.507019 4941 generic.go:334] "Generic (PLEG): container finished" podID="1e58632c-642b-4077-8051-80405f3c0a7a" containerID="ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd" exitCode=0 Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.507392 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" event={"ID":"1e58632c-642b-4077-8051-80405f3c0a7a","Type":"ContainerDied","Data":"ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd"} Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.508116 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" event={"ID":"1e58632c-642b-4077-8051-80405f3c0a7a","Type":"ContainerDied","Data":"e6013678fe054d0e59145805675761f5006a94d7bb1fd0a7a3cb273ff5dffb4d"} Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.508253 4941 scope.go:117] "RemoveContainer" containerID="ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.507516 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.510245 4941 generic.go:334] "Generic (PLEG): container finished" podID="3a398792-576d-4f38-afe8-7d567a69727b" containerID="762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18" exitCode=0 Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.510288 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" event={"ID":"3a398792-576d-4f38-afe8-7d567a69727b","Type":"ContainerDied","Data":"762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18"} Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.510316 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" event={"ID":"3a398792-576d-4f38-afe8-7d567a69727b","Type":"ContainerDied","Data":"8807da1d6762c807ba8dc4c66ce583611dd3a62a8904c67a2c891db67128103c"} Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.510476 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5947bdc4b4-hb26p" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.544813 4941 scope.go:117] "RemoveContainer" containerID="ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd" Nov 30 06:51:53 crc kubenswrapper[4941]: E1130 06:51:53.545158 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd\": container with ID starting with ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd not found: ID does not exist" containerID="ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.545191 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd"} err="failed to get container status \"ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd\": rpc error: code = NotFound desc = could not find container \"ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd\": container with ID starting with ecde256814b2e528fd9ea47733ea16512ac87e915277526dc600911eef2487dd not found: ID does not exist" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.545219 4941 scope.go:117] "RemoveContainer" containerID="762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.568552 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5947bdc4b4-hb26p"] Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.574347 4941 scope.go:117] "RemoveContainer" containerID="762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18" Nov 30 06:51:53 crc kubenswrapper[4941]: E1130 06:51:53.574709 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18\": container with ID starting with 762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18 not found: ID does not exist" containerID="762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 
06:51:53.574745 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18"} err="failed to get container status \"762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18\": rpc error: code = NotFound desc = could not find container \"762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18\": container with ID starting with 762cdf09155e51f5e85304f3f1df4d51bb31a8d6afd41137db5d98521f988c18 not found: ID does not exist" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.580099 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5947bdc4b4-hb26p"] Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.587951 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz"] Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.588001 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8596447d46-7hxbz"] Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.963507 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-696c56d59-6s8bc"] Nov 30 06:51:53 crc kubenswrapper[4941]: E1130 06:51:53.963780 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a398792-576d-4f38-afe8-7d567a69727b" containerName="controller-manager" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.963793 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a398792-576d-4f38-afe8-7d567a69727b" containerName="controller-manager" Nov 30 06:51:53 crc kubenswrapper[4941]: E1130 06:51:53.963814 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e58632c-642b-4077-8051-80405f3c0a7a" containerName="route-controller-manager" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.963820 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e58632c-642b-4077-8051-80405f3c0a7a" containerName="route-controller-manager" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.963902 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a398792-576d-4f38-afe8-7d567a69727b" containerName="controller-manager" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.963915 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e58632c-642b-4077-8051-80405f3c0a7a" containerName="route-controller-manager" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.964318 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.967841 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.967843 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.967990 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.968095 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.969016 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.969902 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.972775 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75"] Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.974485 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.976588 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.976853 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.977028 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.978090 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.978118 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.978261 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.978274 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 30 06:51:53 crc kubenswrapper[4941]: I1130 06:51:53.984181 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-696c56d59-6s8bc"] Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:53.994771 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75"] Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.011476 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/54789aef-9237-45a4-b23b-8eede921131b-config\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.011523 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09e376fe-d733-4630-8164-f40f575c402b-serving-cert\") pod \"route-controller-manager-768ccd95d9-nfr75\" (UID: \"09e376fe-d733-4630-8164-f40f575c402b\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.011551 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e376fe-d733-4630-8164-f40f575c402b-config\") pod \"route-controller-manager-768ccd95d9-nfr75\" (UID: \"09e376fe-d733-4630-8164-f40f575c402b\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.011576 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/09e376fe-d733-4630-8164-f40f575c402b-client-ca\") pod \"route-controller-manager-768ccd95d9-nfr75\" (UID: \"09e376fe-d733-4630-8164-f40f575c402b\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.011601 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfprd\" (UniqueName: \"kubernetes.io/projected/54789aef-9237-45a4-b23b-8eede921131b-kube-api-access-nfprd\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.011619 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvcml\" (UniqueName: \"kubernetes.io/projected/09e376fe-d733-4630-8164-f40f575c402b-kube-api-access-xvcml\") pod \"route-controller-manager-768ccd95d9-nfr75\" (UID: \"09e376fe-d733-4630-8164-f40f575c402b\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.011639 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/54789aef-9237-45a4-b23b-8eede921131b-serving-cert\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.011655 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/54789aef-9237-45a4-b23b-8eede921131b-client-ca\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.011678 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" 
(UniqueName: \"kubernetes.io/configmap/54789aef-9237-45a4-b23b-8eede921131b-proxy-ca-bundles\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.112863 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09e376fe-d733-4630-8164-f40f575c402b-serving-cert\") pod \"route-controller-manager-768ccd95d9-nfr75\" (UID: \"09e376fe-d733-4630-8164-f40f575c402b\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.112914 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e376fe-d733-4630-8164-f40f575c402b-config\") pod \"route-controller-manager-768ccd95d9-nfr75\" (UID: \"09e376fe-d733-4630-8164-f40f575c402b\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.112952 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/09e376fe-d733-4630-8164-f40f575c402b-client-ca\") pod \"route-controller-manager-768ccd95d9-nfr75\" (UID: \"09e376fe-d733-4630-8164-f40f575c402b\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.112985 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfprd\" (UniqueName: \"kubernetes.io/projected/54789aef-9237-45a4-b23b-8eede921131b-kube-api-access-nfprd\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.113018 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvcml\" (UniqueName: \"kubernetes.io/projected/09e376fe-d733-4630-8164-f40f575c402b-kube-api-access-xvcml\") pod \"route-controller-manager-768ccd95d9-nfr75\" (UID: \"09e376fe-d733-4630-8164-f40f575c402b\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.113049 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/54789aef-9237-45a4-b23b-8eede921131b-serving-cert\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.113073 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/54789aef-9237-45a4-b23b-8eede921131b-client-ca\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.113092 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/54789aef-9237-45a4-b23b-8eede921131b-proxy-ca-bundles\") pod 
\"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.113120 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54789aef-9237-45a4-b23b-8eede921131b-config\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.114968 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54789aef-9237-45a4-b23b-8eede921131b-config\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.114983 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/54789aef-9237-45a4-b23b-8eede921131b-client-ca\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.115637 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/09e376fe-d733-4630-8164-f40f575c402b-client-ca\") pod \"route-controller-manager-768ccd95d9-nfr75\" (UID: \"09e376fe-d733-4630-8164-f40f575c402b\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.115935 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/54789aef-9237-45a4-b23b-8eede921131b-proxy-ca-bundles\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.116756 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e376fe-d733-4630-8164-f40f575c402b-config\") pod \"route-controller-manager-768ccd95d9-nfr75\" (UID: \"09e376fe-d733-4630-8164-f40f575c402b\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.121240 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/54789aef-9237-45a4-b23b-8eede921131b-serving-cert\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.124887 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09e376fe-d733-4630-8164-f40f575c402b-serving-cert\") pod \"route-controller-manager-768ccd95d9-nfr75\" (UID: \"09e376fe-d733-4630-8164-f40f575c402b\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.130069 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nfprd\" (UniqueName: \"kubernetes.io/projected/54789aef-9237-45a4-b23b-8eede921131b-kube-api-access-nfprd\") pod \"controller-manager-696c56d59-6s8bc\" (UID: \"54789aef-9237-45a4-b23b-8eede921131b\") " pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.134020 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvcml\" (UniqueName: \"kubernetes.io/projected/09e376fe-d733-4630-8164-f40f575c402b-kube-api-access-xvcml\") pod \"route-controller-manager-768ccd95d9-nfr75\" (UID: \"09e376fe-d733-4630-8164-f40f575c402b\") " pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.307430 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.324996 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.579026 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75"] Nov 30 06:51:54 crc kubenswrapper[4941]: W1130 06:51:54.587838 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09e376fe_d733_4630_8164_f40f575c402b.slice/crio-8e407b3a0b9ec739f0e0b414321c933da99a04ec99f0bea7b56934ec3a7682de WatchSource:0}: Error finding container 8e407b3a0b9ec739f0e0b414321c933da99a04ec99f0bea7b56934ec3a7682de: Status 404 returned error can't find the container with id 8e407b3a0b9ec739f0e0b414321c933da99a04ec99f0bea7b56934ec3a7682de Nov 30 06:51:54 crc kubenswrapper[4941]: I1130 06:51:54.630905 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-696c56d59-6s8bc"] Nov 30 06:51:54 crc kubenswrapper[4941]: W1130 06:51:54.641244 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54789aef_9237_45a4_b23b_8eede921131b.slice/crio-88ccc3478a01da2808ff0e14fa117b960df8c1e598325f275ce0afe7e924bff7 WatchSource:0}: Error finding container 88ccc3478a01da2808ff0e14fa117b960df8c1e598325f275ce0afe7e924bff7: Status 404 returned error can't find the container with id 88ccc3478a01da2808ff0e14fa117b960df8c1e598325f275ce0afe7e924bff7 Nov 30 06:51:55 crc kubenswrapper[4941]: I1130 06:51:55.529226 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e58632c-642b-4077-8051-80405f3c0a7a" path="/var/lib/kubelet/pods/1e58632c-642b-4077-8051-80405f3c0a7a/volumes" Nov 30 06:51:55 crc kubenswrapper[4941]: I1130 06:51:55.529932 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a398792-576d-4f38-afe8-7d567a69727b" path="/var/lib/kubelet/pods/3a398792-576d-4f38-afe8-7d567a69727b/volumes" Nov 30 06:51:55 crc kubenswrapper[4941]: I1130 06:51:55.530362 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:55 crc kubenswrapper[4941]: I1130 06:51:55.530390 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" event={"ID":"09e376fe-d733-4630-8164-f40f575c402b","Type":"ContainerStarted","Data":"eaa5d471acd2fe612d1903ab6829a3ab71ed1569291c47bb4c6a8a38510e6a99"} Nov 30 06:51:55 crc kubenswrapper[4941]: I1130 06:51:55.530413 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" event={"ID":"09e376fe-d733-4630-8164-f40f575c402b","Type":"ContainerStarted","Data":"8e407b3a0b9ec739f0e0b414321c933da99a04ec99f0bea7b56934ec3a7682de"} Nov 30 06:51:55 crc kubenswrapper[4941]: I1130 06:51:55.530493 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" event={"ID":"54789aef-9237-45a4-b23b-8eede921131b","Type":"ContainerStarted","Data":"20aad651974ffe857fdca70265f35c01a6947f22199ba4cba42017b36d76c840"} Nov 30 06:51:55 crc kubenswrapper[4941]: I1130 06:51:55.530537 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" event={"ID":"54789aef-9237-45a4-b23b-8eede921131b","Type":"ContainerStarted","Data":"88ccc3478a01da2808ff0e14fa117b960df8c1e598325f275ce0afe7e924bff7"} Nov 30 06:51:55 crc kubenswrapper[4941]: I1130 06:51:55.530690 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:55 crc kubenswrapper[4941]: I1130 06:51:55.535243 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" Nov 30 06:51:55 crc kubenswrapper[4941]: I1130 06:51:55.536142 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" Nov 30 06:51:55 crc kubenswrapper[4941]: I1130 06:51:55.547371 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-768ccd95d9-nfr75" podStartSLOduration=3.5473503490000002 podStartE2EDuration="3.547350349s" podCreationTimestamp="2025-11-30 06:51:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:51:55.544167598 +0000 UTC m=+336.312339207" watchObservedRunningTime="2025-11-30 06:51:55.547350349 +0000 UTC m=+336.315521958" Nov 30 06:51:55 crc kubenswrapper[4941]: I1130 06:51:55.589641 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-696c56d59-6s8bc" podStartSLOduration=3.589616976 podStartE2EDuration="3.589616976s" podCreationTimestamp="2025-11-30 06:51:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:51:55.583702656 +0000 UTC m=+336.351874305" watchObservedRunningTime="2025-11-30 06:51:55.589616976 +0000 UTC m=+336.357788595" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.436068 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-w48sl"] Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.437506 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.450720 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-w48sl"] Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.598627 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/59dea802-19ab-4ca5-83b4-72e8422734f0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.598671 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59dea802-19ab-4ca5-83b4-72e8422734f0-trusted-ca\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.598705 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/59dea802-19ab-4ca5-83b4-72e8422734f0-registry-tls\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.598723 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dmgt\" (UniqueName: \"kubernetes.io/projected/59dea802-19ab-4ca5-83b4-72e8422734f0-kube-api-access-7dmgt\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.598744 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/59dea802-19ab-4ca5-83b4-72e8422734f0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.598893 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/59dea802-19ab-4ca5-83b4-72e8422734f0-registry-certificates\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.598985 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.599019 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/59dea802-19ab-4ca5-83b4-72e8422734f0-bound-sa-token\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.623420 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.699965 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59dea802-19ab-4ca5-83b4-72e8422734f0-bound-sa-token\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.700040 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/59dea802-19ab-4ca5-83b4-72e8422734f0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.700069 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59dea802-19ab-4ca5-83b4-72e8422734f0-trusted-ca\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.700110 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/59dea802-19ab-4ca5-83b4-72e8422734f0-registry-tls\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.700134 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dmgt\" (UniqueName: \"kubernetes.io/projected/59dea802-19ab-4ca5-83b4-72e8422734f0-kube-api-access-7dmgt\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.700161 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/59dea802-19ab-4ca5-83b4-72e8422734f0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.700193 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/59dea802-19ab-4ca5-83b4-72e8422734f0-registry-certificates\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.700566 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/59dea802-19ab-4ca5-83b4-72e8422734f0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.701427 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/59dea802-19ab-4ca5-83b4-72e8422734f0-registry-certificates\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.702811 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/59dea802-19ab-4ca5-83b4-72e8422734f0-trusted-ca\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.706869 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/59dea802-19ab-4ca5-83b4-72e8422734f0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.707411 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/59dea802-19ab-4ca5-83b4-72e8422734f0-registry-tls\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.715236 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/59dea802-19ab-4ca5-83b4-72e8422734f0-bound-sa-token\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.717883 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dmgt\" (UniqueName: \"kubernetes.io/projected/59dea802-19ab-4ca5-83b4-72e8422734f0-kube-api-access-7dmgt\") pod \"image-registry-66df7c8f76-w48sl\" (UID: \"59dea802-19ab-4ca5-83b4-72e8422734f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:16 crc kubenswrapper[4941]: I1130 06:52:16.799856 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:17 crc kubenswrapper[4941]: I1130 06:52:17.202983 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-w48sl"] Nov 30 06:52:17 crc kubenswrapper[4941]: I1130 06:52:17.657297 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" event={"ID":"59dea802-19ab-4ca5-83b4-72e8422734f0","Type":"ContainerStarted","Data":"8aafc7d9cf304797dc4e41fabe21c82c97ce4cd0431710a5a1167f810780c9c7"} Nov 30 06:52:17 crc kubenswrapper[4941]: I1130 06:52:17.657660 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" event={"ID":"59dea802-19ab-4ca5-83b4-72e8422734f0","Type":"ContainerStarted","Data":"d5a1cb0dae63df5ffb8ea078acf7bc51f189f5ec3b18bc1ed3174dd4007a1e8a"} Nov 30 06:52:17 crc kubenswrapper[4941]: I1130 06:52:17.657843 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:17 crc kubenswrapper[4941]: I1130 06:52:17.685578 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" podStartSLOduration=1.6855512689999999 podStartE2EDuration="1.685551269s" podCreationTimestamp="2025-11-30 06:52:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:52:17.677505161 +0000 UTC m=+358.445676780" watchObservedRunningTime="2025-11-30 06:52:17.685551269 +0000 UTC m=+358.453722888" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.007051 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-67mqs"] Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.008634 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.011064 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.018727 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-67mqs"] Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.153220 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f241870-dc20-4e9d-b9ca-6504fc0b3eb1-utilities\") pod \"redhat-marketplace-67mqs\" (UID: \"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1\") " pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.153269 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f241870-dc20-4e9d-b9ca-6504fc0b3eb1-catalog-content\") pod \"redhat-marketplace-67mqs\" (UID: \"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1\") " pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.153354 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpmfd\" (UniqueName: \"kubernetes.io/projected/5f241870-dc20-4e9d-b9ca-6504fc0b3eb1-kube-api-access-jpmfd\") pod \"redhat-marketplace-67mqs\" (UID: \"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1\") " pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.195246 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s5mh6"] Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.198832 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.202844 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.204091 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s5mh6"] Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.254286 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpmfd\" (UniqueName: \"kubernetes.io/projected/5f241870-dc20-4e9d-b9ca-6504fc0b3eb1-kube-api-access-jpmfd\") pod \"redhat-marketplace-67mqs\" (UID: \"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1\") " pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.254424 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f241870-dc20-4e9d-b9ca-6504fc0b3eb1-utilities\") pod \"redhat-marketplace-67mqs\" (UID: \"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1\") " pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.254470 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f241870-dc20-4e9d-b9ca-6504fc0b3eb1-catalog-content\") pod \"redhat-marketplace-67mqs\" (UID: \"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1\") " pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.255152 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f241870-dc20-4e9d-b9ca-6504fc0b3eb1-utilities\") pod \"redhat-marketplace-67mqs\" (UID: \"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1\") " pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.255555 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f241870-dc20-4e9d-b9ca-6504fc0b3eb1-catalog-content\") pod \"redhat-marketplace-67mqs\" (UID: \"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1\") " pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.277664 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpmfd\" (UniqueName: \"kubernetes.io/projected/5f241870-dc20-4e9d-b9ca-6504fc0b3eb1-kube-api-access-jpmfd\") pod \"redhat-marketplace-67mqs\" (UID: \"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1\") " pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.356452 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4faa4588-9dd2-4bf3-a983-acbe2b5996db-utilities\") pod \"redhat-operators-s5mh6\" (UID: \"4faa4588-9dd2-4bf3-a983-acbe2b5996db\") " pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.356710 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klrgl\" (UniqueName: \"kubernetes.io/projected/4faa4588-9dd2-4bf3-a983-acbe2b5996db-kube-api-access-klrgl\") pod \"redhat-operators-s5mh6\" (UID: \"4faa4588-9dd2-4bf3-a983-acbe2b5996db\") " 
pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.356804 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4faa4588-9dd2-4bf3-a983-acbe2b5996db-catalog-content\") pod \"redhat-operators-s5mh6\" (UID: \"4faa4588-9dd2-4bf3-a983-acbe2b5996db\") " pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.389073 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.458018 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4faa4588-9dd2-4bf3-a983-acbe2b5996db-utilities\") pod \"redhat-operators-s5mh6\" (UID: \"4faa4588-9dd2-4bf3-a983-acbe2b5996db\") " pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.458111 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klrgl\" (UniqueName: \"kubernetes.io/projected/4faa4588-9dd2-4bf3-a983-acbe2b5996db-kube-api-access-klrgl\") pod \"redhat-operators-s5mh6\" (UID: \"4faa4588-9dd2-4bf3-a983-acbe2b5996db\") " pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.458143 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4faa4588-9dd2-4bf3-a983-acbe2b5996db-catalog-content\") pod \"redhat-operators-s5mh6\" (UID: \"4faa4588-9dd2-4bf3-a983-acbe2b5996db\") " pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.459235 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4faa4588-9dd2-4bf3-a983-acbe2b5996db-utilities\") pod \"redhat-operators-s5mh6\" (UID: \"4faa4588-9dd2-4bf3-a983-acbe2b5996db\") " pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.460663 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4faa4588-9dd2-4bf3-a983-acbe2b5996db-catalog-content\") pod \"redhat-operators-s5mh6\" (UID: \"4faa4588-9dd2-4bf3-a983-acbe2b5996db\") " pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.484619 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klrgl\" (UniqueName: \"kubernetes.io/projected/4faa4588-9dd2-4bf3-a983-acbe2b5996db-kube-api-access-klrgl\") pod \"redhat-operators-s5mh6\" (UID: \"4faa4588-9dd2-4bf3-a983-acbe2b5996db\") " pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.529826 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.827084 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-67mqs"] Nov 30 06:52:20 crc kubenswrapper[4941]: I1130 06:52:20.915659 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s5mh6"] Nov 30 06:52:20 crc kubenswrapper[4941]: W1130 06:52:20.927542 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4faa4588_9dd2_4bf3_a983_acbe2b5996db.slice/crio-20cdda874fbef57e2b1aa0b043431c23fd344351b05525688a95ee9f44c82f27 WatchSource:0}: Error finding container 20cdda874fbef57e2b1aa0b043431c23fd344351b05525688a95ee9f44c82f27: Status 404 returned error can't find the container with id 20cdda874fbef57e2b1aa0b043431c23fd344351b05525688a95ee9f44c82f27 Nov 30 06:52:21 crc kubenswrapper[4941]: I1130 06:52:21.685817 4941 generic.go:334] "Generic (PLEG): container finished" podID="5f241870-dc20-4e9d-b9ca-6504fc0b3eb1" containerID="356c4ed7d37d1d2eae65c1732c010930e3a43ba094dd09ed42624062be69e2db" exitCode=0 Nov 30 06:52:21 crc kubenswrapper[4941]: I1130 06:52:21.685931 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67mqs" event={"ID":"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1","Type":"ContainerDied","Data":"356c4ed7d37d1d2eae65c1732c010930e3a43ba094dd09ed42624062be69e2db"} Nov 30 06:52:21 crc kubenswrapper[4941]: I1130 06:52:21.686254 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67mqs" event={"ID":"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1","Type":"ContainerStarted","Data":"d67a9c1f428bc4f0b2f3e653196b62054d74a2a5574f82953eb045fc6829a6ac"} Nov 30 06:52:21 crc kubenswrapper[4941]: I1130 06:52:21.689171 4941 generic.go:334] "Generic (PLEG): container finished" podID="4faa4588-9dd2-4bf3-a983-acbe2b5996db" containerID="7ea9c53b2ab81c3bce29e9bd3db0480db15997280f02b7965f017c287f2b4a25" exitCode=0 Nov 30 06:52:21 crc kubenswrapper[4941]: I1130 06:52:21.689211 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5mh6" event={"ID":"4faa4588-9dd2-4bf3-a983-acbe2b5996db","Type":"ContainerDied","Data":"7ea9c53b2ab81c3bce29e9bd3db0480db15997280f02b7965f017c287f2b4a25"} Nov 30 06:52:21 crc kubenswrapper[4941]: I1130 06:52:21.689240 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5mh6" event={"ID":"4faa4588-9dd2-4bf3-a983-acbe2b5996db","Type":"ContainerStarted","Data":"20cdda874fbef57e2b1aa0b043431c23fd344351b05525688a95ee9f44c82f27"} Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.392577 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5ntbl"] Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.393942 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.396158 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.402129 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5ntbl"] Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.488409 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/876e6120-1086-4d94-a98d-a757228000ae-utilities\") pod \"community-operators-5ntbl\" (UID: \"876e6120-1086-4d94-a98d-a757228000ae\") " pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.488617 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/876e6120-1086-4d94-a98d-a757228000ae-catalog-content\") pod \"community-operators-5ntbl\" (UID: \"876e6120-1086-4d94-a98d-a757228000ae\") " pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.488693 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjr5c\" (UniqueName: \"kubernetes.io/projected/876e6120-1086-4d94-a98d-a757228000ae-kube-api-access-tjr5c\") pod \"community-operators-5ntbl\" (UID: \"876e6120-1086-4d94-a98d-a757228000ae\") " pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.589932 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/876e6120-1086-4d94-a98d-a757228000ae-catalog-content\") pod \"community-operators-5ntbl\" (UID: \"876e6120-1086-4d94-a98d-a757228000ae\") " pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.589990 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjr5c\" (UniqueName: \"kubernetes.io/projected/876e6120-1086-4d94-a98d-a757228000ae-kube-api-access-tjr5c\") pod \"community-operators-5ntbl\" (UID: \"876e6120-1086-4d94-a98d-a757228000ae\") " pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.590053 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/876e6120-1086-4d94-a98d-a757228000ae-utilities\") pod \"community-operators-5ntbl\" (UID: \"876e6120-1086-4d94-a98d-a757228000ae\") " pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.590519 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/876e6120-1086-4d94-a98d-a757228000ae-utilities\") pod \"community-operators-5ntbl\" (UID: \"876e6120-1086-4d94-a98d-a757228000ae\") " pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.591099 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/876e6120-1086-4d94-a98d-a757228000ae-catalog-content\") pod \"community-operators-5ntbl\" (UID: 
\"876e6120-1086-4d94-a98d-a757228000ae\") " pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.598166 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s49t5"] Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.600017 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.602122 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.602649 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s49t5"] Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.614361 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjr5c\" (UniqueName: \"kubernetes.io/projected/876e6120-1086-4d94-a98d-a757228000ae-kube-api-access-tjr5c\") pod \"community-operators-5ntbl\" (UID: \"876e6120-1086-4d94-a98d-a757228000ae\") " pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.690799 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53470168-a7a9-4617-83d5-3acbbd4467e5-catalog-content\") pod \"certified-operators-s49t5\" (UID: \"53470168-a7a9-4617-83d5-3acbbd4467e5\") " pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.691002 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gnll\" (UniqueName: \"kubernetes.io/projected/53470168-a7a9-4617-83d5-3acbbd4467e5-kube-api-access-5gnll\") pod \"certified-operators-s49t5\" (UID: \"53470168-a7a9-4617-83d5-3acbbd4467e5\") " pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.691034 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53470168-a7a9-4617-83d5-3acbbd4467e5-utilities\") pod \"certified-operators-s49t5\" (UID: \"53470168-a7a9-4617-83d5-3acbbd4467e5\") " pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.696587 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5mh6" event={"ID":"4faa4588-9dd2-4bf3-a983-acbe2b5996db","Type":"ContainerStarted","Data":"0900bc1f69d44b1a26af71a4136a5ebd6212de6567fcee2c1390f303908ab217"} Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.698023 4941 generic.go:334] "Generic (PLEG): container finished" podID="5f241870-dc20-4e9d-b9ca-6504fc0b3eb1" containerID="f74a3618f260a5488c4f1a0bf70f526f364e44c2709da635cdd8d6fe706faf3a" exitCode=0 Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.698052 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67mqs" event={"ID":"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1","Type":"ContainerDied","Data":"f74a3618f260a5488c4f1a0bf70f526f364e44c2709da635cdd8d6fe706faf3a"} Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.791588 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.791991 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53470168-a7a9-4617-83d5-3acbbd4467e5-catalog-content\") pod \"certified-operators-s49t5\" (UID: \"53470168-a7a9-4617-83d5-3acbbd4467e5\") " pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.792106 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gnll\" (UniqueName: \"kubernetes.io/projected/53470168-a7a9-4617-83d5-3acbbd4467e5-kube-api-access-5gnll\") pod \"certified-operators-s49t5\" (UID: \"53470168-a7a9-4617-83d5-3acbbd4467e5\") " pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.792143 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53470168-a7a9-4617-83d5-3acbbd4467e5-utilities\") pod \"certified-operators-s49t5\" (UID: \"53470168-a7a9-4617-83d5-3acbbd4467e5\") " pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.792472 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53470168-a7a9-4617-83d5-3acbbd4467e5-catalog-content\") pod \"certified-operators-s49t5\" (UID: \"53470168-a7a9-4617-83d5-3acbbd4467e5\") " pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.794532 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53470168-a7a9-4617-83d5-3acbbd4467e5-utilities\") pod \"certified-operators-s49t5\" (UID: \"53470168-a7a9-4617-83d5-3acbbd4467e5\") " pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.822477 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gnll\" (UniqueName: \"kubernetes.io/projected/53470168-a7a9-4617-83d5-3acbbd4467e5-kube-api-access-5gnll\") pod \"certified-operators-s49t5\" (UID: \"53470168-a7a9-4617-83d5-3acbbd4467e5\") " pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:22 crc kubenswrapper[4941]: I1130 06:52:22.915034 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:23 crc kubenswrapper[4941]: I1130 06:52:23.169917 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5ntbl"] Nov 30 06:52:23 crc kubenswrapper[4941]: W1130 06:52:23.172045 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod876e6120_1086_4d94_a98d_a757228000ae.slice/crio-1d69989f7bd29f3dc43f77b629220abe8db4e8f357174484db835268ca14d5c6 WatchSource:0}: Error finding container 1d69989f7bd29f3dc43f77b629220abe8db4e8f357174484db835268ca14d5c6: Status 404 returned error can't find the container with id 1d69989f7bd29f3dc43f77b629220abe8db4e8f357174484db835268ca14d5c6 Nov 30 06:52:23 crc kubenswrapper[4941]: I1130 06:52:23.316803 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s49t5"] Nov 30 06:52:23 crc kubenswrapper[4941]: W1130 06:52:23.358418 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod53470168_a7a9_4617_83d5_3acbbd4467e5.slice/crio-df4879fc8a64b3a2092759f8aec3afad8930834a4b16c33238859f12017df7f4 WatchSource:0}: Error finding container df4879fc8a64b3a2092759f8aec3afad8930834a4b16c33238859f12017df7f4: Status 404 returned error can't find the container with id df4879fc8a64b3a2092759f8aec3afad8930834a4b16c33238859f12017df7f4 Nov 30 06:52:23 crc kubenswrapper[4941]: I1130 06:52:23.705502 4941 generic.go:334] "Generic (PLEG): container finished" podID="4faa4588-9dd2-4bf3-a983-acbe2b5996db" containerID="0900bc1f69d44b1a26af71a4136a5ebd6212de6567fcee2c1390f303908ab217" exitCode=0 Nov 30 06:52:23 crc kubenswrapper[4941]: I1130 06:52:23.705589 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5mh6" event={"ID":"4faa4588-9dd2-4bf3-a983-acbe2b5996db","Type":"ContainerDied","Data":"0900bc1f69d44b1a26af71a4136a5ebd6212de6567fcee2c1390f303908ab217"} Nov 30 06:52:23 crc kubenswrapper[4941]: I1130 06:52:23.710137 4941 generic.go:334] "Generic (PLEG): container finished" podID="53470168-a7a9-4617-83d5-3acbbd4467e5" containerID="baf545c5c5ad48d8bd08657ce3961f4e5d8db7d72b20fec416c9370d7fb3e350" exitCode=0 Nov 30 06:52:23 crc kubenswrapper[4941]: I1130 06:52:23.710211 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s49t5" event={"ID":"53470168-a7a9-4617-83d5-3acbbd4467e5","Type":"ContainerDied","Data":"baf545c5c5ad48d8bd08657ce3961f4e5d8db7d72b20fec416c9370d7fb3e350"} Nov 30 06:52:23 crc kubenswrapper[4941]: I1130 06:52:23.710446 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s49t5" event={"ID":"53470168-a7a9-4617-83d5-3acbbd4467e5","Type":"ContainerStarted","Data":"df4879fc8a64b3a2092759f8aec3afad8930834a4b16c33238859f12017df7f4"} Nov 30 06:52:23 crc kubenswrapper[4941]: I1130 06:52:23.718836 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-67mqs" event={"ID":"5f241870-dc20-4e9d-b9ca-6504fc0b3eb1","Type":"ContainerStarted","Data":"2c3ca3ce5587785ca612131a0bba70445ce60492c24ebafdc78796cd9345572c"} Nov 30 06:52:23 crc kubenswrapper[4941]: I1130 06:52:23.720480 4941 generic.go:334] "Generic (PLEG): container finished" podID="876e6120-1086-4d94-a98d-a757228000ae" containerID="9f1d7a825ae6b0d032b4048917cd144bb4382572ffa53ff95db6facdfe35549d" 
exitCode=0 Nov 30 06:52:23 crc kubenswrapper[4941]: I1130 06:52:23.720532 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5ntbl" event={"ID":"876e6120-1086-4d94-a98d-a757228000ae","Type":"ContainerDied","Data":"9f1d7a825ae6b0d032b4048917cd144bb4382572ffa53ff95db6facdfe35549d"} Nov 30 06:52:23 crc kubenswrapper[4941]: I1130 06:52:23.720563 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5ntbl" event={"ID":"876e6120-1086-4d94-a98d-a757228000ae","Type":"ContainerStarted","Data":"1d69989f7bd29f3dc43f77b629220abe8db4e8f357174484db835268ca14d5c6"} Nov 30 06:52:23 crc kubenswrapper[4941]: I1130 06:52:23.750105 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-67mqs" podStartSLOduration=3.236518898 podStartE2EDuration="4.750087789s" podCreationTimestamp="2025-11-30 06:52:19 +0000 UTC" firstStartedPulling="2025-11-30 06:52:21.687944446 +0000 UTC m=+362.456116055" lastFinishedPulling="2025-11-30 06:52:23.201513337 +0000 UTC m=+363.969684946" observedRunningTime="2025-11-30 06:52:23.742463285 +0000 UTC m=+364.510634924" watchObservedRunningTime="2025-11-30 06:52:23.750087789 +0000 UTC m=+364.518259408" Nov 30 06:52:24 crc kubenswrapper[4941]: I1130 06:52:24.726731 4941 generic.go:334] "Generic (PLEG): container finished" podID="876e6120-1086-4d94-a98d-a757228000ae" containerID="787b9570de87a4370ccfc8a72d466b443f6083ca341a993082c11fd76e857833" exitCode=0 Nov 30 06:52:24 crc kubenswrapper[4941]: I1130 06:52:24.726819 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5ntbl" event={"ID":"876e6120-1086-4d94-a98d-a757228000ae","Type":"ContainerDied","Data":"787b9570de87a4370ccfc8a72d466b443f6083ca341a993082c11fd76e857833"} Nov 30 06:52:24 crc kubenswrapper[4941]: I1130 06:52:24.730233 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5mh6" event={"ID":"4faa4588-9dd2-4bf3-a983-acbe2b5996db","Type":"ContainerStarted","Data":"5f6c737ee1277c247c22819db3cc2c020fb6bfa6590b8056bb55dfdb0506f010"} Nov 30 06:52:24 crc kubenswrapper[4941]: I1130 06:52:24.733805 4941 generic.go:334] "Generic (PLEG): container finished" podID="53470168-a7a9-4617-83d5-3acbbd4467e5" containerID="4f0786e572ac9d793d95ecf71e24e36ecc1c296a9671068e78080ad447fe75c9" exitCode=0 Nov 30 06:52:24 crc kubenswrapper[4941]: I1130 06:52:24.733909 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s49t5" event={"ID":"53470168-a7a9-4617-83d5-3acbbd4467e5","Type":"ContainerDied","Data":"4f0786e572ac9d793d95ecf71e24e36ecc1c296a9671068e78080ad447fe75c9"} Nov 30 06:52:24 crc kubenswrapper[4941]: I1130 06:52:24.761512 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-s5mh6" podStartSLOduration=2.315979147 podStartE2EDuration="4.76149522s" podCreationTimestamp="2025-11-30 06:52:20 +0000 UTC" firstStartedPulling="2025-11-30 06:52:21.693138422 +0000 UTC m=+362.461310031" lastFinishedPulling="2025-11-30 06:52:24.138654485 +0000 UTC m=+364.906826104" observedRunningTime="2025-11-30 06:52:24.758158133 +0000 UTC m=+365.526329742" watchObservedRunningTime="2025-11-30 06:52:24.76149522 +0000 UTC m=+365.529666829" Nov 30 06:52:25 crc kubenswrapper[4941]: I1130 06:52:25.746448 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-5ntbl" event={"ID":"876e6120-1086-4d94-a98d-a757228000ae","Type":"ContainerStarted","Data":"63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6"} Nov 30 06:52:25 crc kubenswrapper[4941]: I1130 06:52:25.748354 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s49t5" event={"ID":"53470168-a7a9-4617-83d5-3acbbd4467e5","Type":"ContainerStarted","Data":"fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29"} Nov 30 06:52:25 crc kubenswrapper[4941]: I1130 06:52:25.766571 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5ntbl" podStartSLOduration=2.377111081 podStartE2EDuration="3.766556477s" podCreationTimestamp="2025-11-30 06:52:22 +0000 UTC" firstStartedPulling="2025-11-30 06:52:23.721798642 +0000 UTC m=+364.489970261" lastFinishedPulling="2025-11-30 06:52:25.111244048 +0000 UTC m=+365.879415657" observedRunningTime="2025-11-30 06:52:25.762641592 +0000 UTC m=+366.530813201" watchObservedRunningTime="2025-11-30 06:52:25.766556477 +0000 UTC m=+366.534728076" Nov 30 06:52:25 crc kubenswrapper[4941]: I1130 06:52:25.784400 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s49t5" podStartSLOduration=2.2754759780000002 podStartE2EDuration="3.78438534s" podCreationTimestamp="2025-11-30 06:52:22 +0000 UTC" firstStartedPulling="2025-11-30 06:52:23.714614841 +0000 UTC m=+364.482786460" lastFinishedPulling="2025-11-30 06:52:25.223524213 +0000 UTC m=+365.991695822" observedRunningTime="2025-11-30 06:52:25.781850808 +0000 UTC m=+366.550022417" watchObservedRunningTime="2025-11-30 06:52:25.78438534 +0000 UTC m=+366.552556949" Nov 30 06:52:30 crc kubenswrapper[4941]: I1130 06:52:30.390001 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:30 crc kubenswrapper[4941]: I1130 06:52:30.390369 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:30 crc kubenswrapper[4941]: I1130 06:52:30.438213 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:30 crc kubenswrapper[4941]: I1130 06:52:30.531053 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:30 crc kubenswrapper[4941]: I1130 06:52:30.531096 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:30 crc kubenswrapper[4941]: I1130 06:52:30.594960 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:30 crc kubenswrapper[4941]: I1130 06:52:30.820616 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-67mqs" Nov 30 06:52:30 crc kubenswrapper[4941]: I1130 06:52:30.851006 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s5mh6" Nov 30 06:52:32 crc kubenswrapper[4941]: I1130 06:52:32.792743 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:32 crc kubenswrapper[4941]: I1130 06:52:32.792959 
4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:32 crc kubenswrapper[4941]: I1130 06:52:32.864408 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:32 crc kubenswrapper[4941]: I1130 06:52:32.916698 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:32 crc kubenswrapper[4941]: I1130 06:52:32.916805 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:32 crc kubenswrapper[4941]: I1130 06:52:32.970527 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:32 crc kubenswrapper[4941]: I1130 06:52:32.979210 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 06:52:32 crc kubenswrapper[4941]: I1130 06:52:32.979295 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 06:52:33 crc kubenswrapper[4941]: I1130 06:52:33.851642 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5ntbl" Nov 30 06:52:33 crc kubenswrapper[4941]: I1130 06:52:33.868762 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s49t5" Nov 30 06:52:36 crc kubenswrapper[4941]: I1130 06:52:36.807147 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-w48sl" Nov 30 06:52:36 crc kubenswrapper[4941]: I1130 06:52:36.925687 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krsrr"] Nov 30 06:53:01 crc kubenswrapper[4941]: I1130 06:53:01.978848 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" podUID="52150dbc-4724-4cc3-a326-5caaae27246e" containerName="registry" containerID="cri-o://534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978" gracePeriod=30 Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.404101 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.461204 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52150dbc-4724-4cc3-a326-5caaae27246e-registry-certificates\") pod \"52150dbc-4724-4cc3-a326-5caaae27246e\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.461297 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52150dbc-4724-4cc3-a326-5caaae27246e-installation-pull-secrets\") pod \"52150dbc-4724-4cc3-a326-5caaae27246e\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.461393 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-bound-sa-token\") pod \"52150dbc-4724-4cc3-a326-5caaae27246e\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.461553 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"52150dbc-4724-4cc3-a326-5caaae27246e\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.461630 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-registry-tls\") pod \"52150dbc-4724-4cc3-a326-5caaae27246e\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.461666 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52150dbc-4724-4cc3-a326-5caaae27246e-ca-trust-extracted\") pod \"52150dbc-4724-4cc3-a326-5caaae27246e\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.461694 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8ngh\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-kube-api-access-x8ngh\") pod \"52150dbc-4724-4cc3-a326-5caaae27246e\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.461724 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52150dbc-4724-4cc3-a326-5caaae27246e-trusted-ca\") pod \"52150dbc-4724-4cc3-a326-5caaae27246e\" (UID: \"52150dbc-4724-4cc3-a326-5caaae27246e\") " Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.462661 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52150dbc-4724-4cc3-a326-5caaae27246e-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "52150dbc-4724-4cc3-a326-5caaae27246e" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.463025 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52150dbc-4724-4cc3-a326-5caaae27246e-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "52150dbc-4724-4cc3-a326-5caaae27246e" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.468673 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-kube-api-access-x8ngh" (OuterVolumeSpecName: "kube-api-access-x8ngh") pod "52150dbc-4724-4cc3-a326-5caaae27246e" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e"). InnerVolumeSpecName "kube-api-access-x8ngh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.479029 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52150dbc-4724-4cc3-a326-5caaae27246e-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "52150dbc-4724-4cc3-a326-5caaae27246e" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.479186 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "52150dbc-4724-4cc3-a326-5caaae27246e" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.479543 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "52150dbc-4724-4cc3-a326-5caaae27246e" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.486408 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52150dbc-4724-4cc3-a326-5caaae27246e-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "52150dbc-4724-4cc3-a326-5caaae27246e" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.486732 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "52150dbc-4724-4cc3-a326-5caaae27246e" (UID: "52150dbc-4724-4cc3-a326-5caaae27246e"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.563464 4941 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52150dbc-4724-4cc3-a326-5caaae27246e-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.563501 4941 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52150dbc-4724-4cc3-a326-5caaae27246e-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.563515 4941 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52150dbc-4724-4cc3-a326-5caaae27246e-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.563524 4941 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.563532 4941 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.563540 4941 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52150dbc-4724-4cc3-a326-5caaae27246e-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 30 06:53:02 crc kubenswrapper[4941]: I1130 06:53:02.563548 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8ngh\" (UniqueName: \"kubernetes.io/projected/52150dbc-4724-4cc3-a326-5caaae27246e-kube-api-access-x8ngh\") on node \"crc\" DevicePath \"\"" Nov 30 06:53:03 crc kubenswrapper[4941]: I1130 06:53:02.978872 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 06:53:03 crc kubenswrapper[4941]: I1130 06:53:02.978924 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 06:53:03 crc kubenswrapper[4941]: I1130 06:53:03.029212 4941 generic.go:334] "Generic (PLEG): container finished" podID="52150dbc-4724-4cc3-a326-5caaae27246e" containerID="534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978" exitCode=0 Nov 30 06:53:03 crc kubenswrapper[4941]: I1130 06:53:03.029256 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" event={"ID":"52150dbc-4724-4cc3-a326-5caaae27246e","Type":"ContainerDied","Data":"534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978"} Nov 30 06:53:03 crc kubenswrapper[4941]: I1130 06:53:03.029281 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" 
event={"ID":"52150dbc-4724-4cc3-a326-5caaae27246e","Type":"ContainerDied","Data":"51241397ad6bb77321e81922004a201d0dde6bbb061c389bded0a08daa4347d0"} Nov 30 06:53:03 crc kubenswrapper[4941]: I1130 06:53:03.029300 4941 scope.go:117] "RemoveContainer" containerID="534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978" Nov 30 06:53:03 crc kubenswrapper[4941]: I1130 06:53:03.029420 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-krsrr" Nov 30 06:53:03 crc kubenswrapper[4941]: I1130 06:53:03.049137 4941 scope.go:117] "RemoveContainer" containerID="534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978" Nov 30 06:53:03 crc kubenswrapper[4941]: I1130 06:53:03.054549 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krsrr"] Nov 30 06:53:03 crc kubenswrapper[4941]: E1130 06:53:03.055898 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978\": container with ID starting with 534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978 not found: ID does not exist" containerID="534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978" Nov 30 06:53:03 crc kubenswrapper[4941]: I1130 06:53:03.055945 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978"} err="failed to get container status \"534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978\": rpc error: code = NotFound desc = could not find container \"534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978\": container with ID starting with 534d5a3e50f605f08798b1d83304c61cf5e6ceca1408c909cd1e239ec546b978 not found: ID does not exist" Nov 30 06:53:03 crc kubenswrapper[4941]: I1130 06:53:03.059412 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-krsrr"] Nov 30 06:53:03 crc kubenswrapper[4941]: I1130 06:53:03.540885 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52150dbc-4724-4cc3-a326-5caaae27246e" path="/var/lib/kubelet/pods/52150dbc-4724-4cc3-a326-5caaae27246e/volumes" Nov 30 06:53:32 crc kubenswrapper[4941]: I1130 06:53:32.979159 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 06:53:32 crc kubenswrapper[4941]: I1130 06:53:32.980213 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 06:53:32 crc kubenswrapper[4941]: I1130 06:53:32.980306 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:53:32 crc kubenswrapper[4941]: I1130 06:53:32.981396 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"b1a9f4cdaf11c0f0c41c32ab91daf48e0bfa2787a5967c81bc4ee521f92fdb75"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 06:53:32 crc kubenswrapper[4941]: I1130 06:53:32.981525 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://b1a9f4cdaf11c0f0c41c32ab91daf48e0bfa2787a5967c81bc4ee521f92fdb75" gracePeriod=600 Nov 30 06:53:33 crc kubenswrapper[4941]: I1130 06:53:33.267063 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="b1a9f4cdaf11c0f0c41c32ab91daf48e0bfa2787a5967c81bc4ee521f92fdb75" exitCode=0 Nov 30 06:53:33 crc kubenswrapper[4941]: I1130 06:53:33.267175 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"b1a9f4cdaf11c0f0c41c32ab91daf48e0bfa2787a5967c81bc4ee521f92fdb75"} Nov 30 06:53:33 crc kubenswrapper[4941]: I1130 06:53:33.267532 4941 scope.go:117] "RemoveContainer" containerID="cda0f3c230f1b33789e50715125b38f6f97149fbdadc5013f77463f8da628363" Nov 30 06:53:34 crc kubenswrapper[4941]: I1130 06:53:34.275977 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"2999b81d3c5dcc4cb61fb09cb18df1d902ada4e3797e913664d663ea2105dfb2"} Nov 30 06:55:19 crc kubenswrapper[4941]: I1130 06:55:19.734664 4941 scope.go:117] "RemoveContainer" containerID="9c82a8e97a95c88ea99448ae35c8a31321f662baf212b93d05541597250ac65c" Nov 30 06:56:02 crc kubenswrapper[4941]: I1130 06:56:02.979546 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 06:56:02 crc kubenswrapper[4941]: I1130 06:56:02.980659 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 06:56:19 crc kubenswrapper[4941]: I1130 06:56:19.767237 4941 scope.go:117] "RemoveContainer" containerID="fd27bc67ee98bd0c07e931457cfaabd4c8532938e9fdde0618646a54862c6254" Nov 30 06:56:19 crc kubenswrapper[4941]: I1130 06:56:19.800664 4941 scope.go:117] "RemoveContainer" containerID="bfb5472b56146c15184c3111142a8814d7c20368f48c3117d79bd9d4daa0e9f6" Nov 30 06:56:32 crc kubenswrapper[4941]: I1130 06:56:32.978110 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 06:56:32 crc kubenswrapper[4941]: I1130 06:56:32.978734 4941 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 06:57:02 crc kubenswrapper[4941]: I1130 06:57:02.979000 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 06:57:02 crc kubenswrapper[4941]: I1130 06:57:02.979669 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 06:57:02 crc kubenswrapper[4941]: I1130 06:57:02.979727 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 06:57:02 crc kubenswrapper[4941]: I1130 06:57:02.980432 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2999b81d3c5dcc4cb61fb09cb18df1d902ada4e3797e913664d663ea2105dfb2"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 06:57:02 crc kubenswrapper[4941]: I1130 06:57:02.980527 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://2999b81d3c5dcc4cb61fb09cb18df1d902ada4e3797e913664d663ea2105dfb2" gracePeriod=600 Nov 30 06:57:03 crc kubenswrapper[4941]: I1130 06:57:03.724274 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="2999b81d3c5dcc4cb61fb09cb18df1d902ada4e3797e913664d663ea2105dfb2" exitCode=0 Nov 30 06:57:03 crc kubenswrapper[4941]: I1130 06:57:03.724382 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"2999b81d3c5dcc4cb61fb09cb18df1d902ada4e3797e913664d663ea2105dfb2"} Nov 30 06:57:03 crc kubenswrapper[4941]: I1130 06:57:03.724898 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"88a82781e74dc63c736752840ee31da64c053c5a7d4b1a678036abaa19f971dc"} Nov 30 06:57:03 crc kubenswrapper[4941]: I1130 06:57:03.724925 4941 scope.go:117] "RemoveContainer" containerID="b1a9f4cdaf11c0f0c41c32ab91daf48e0bfa2787a5967c81bc4ee521f92fdb75" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.671613 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-kd2wq"] Nov 30 06:58:01 crc kubenswrapper[4941]: E1130 06:58:01.672910 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52150dbc-4724-4cc3-a326-5caaae27246e" containerName="registry" Nov 30 06:58:01 crc 
kubenswrapper[4941]: I1130 06:58:01.672943 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="52150dbc-4724-4cc3-a326-5caaae27246e" containerName="registry" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.673167 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="52150dbc-4724-4cc3-a326-5caaae27246e" containerName="registry" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.673961 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.676618 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.677776 4941 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-tkjrj" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.678103 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.680819 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.681889 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-kd2wq"] Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.836017 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26hwk\" (UniqueName: \"kubernetes.io/projected/48497e9c-eebe-41d6-8ed3-a9717a43ddae-kube-api-access-26hwk\") pod \"crc-storage-crc-kd2wq\" (UID: \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\") " pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.836138 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/48497e9c-eebe-41d6-8ed3-a9717a43ddae-crc-storage\") pod \"crc-storage-crc-kd2wq\" (UID: \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\") " pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.836406 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/48497e9c-eebe-41d6-8ed3-a9717a43ddae-node-mnt\") pod \"crc-storage-crc-kd2wq\" (UID: \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\") " pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.937502 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/48497e9c-eebe-41d6-8ed3-a9717a43ddae-crc-storage\") pod \"crc-storage-crc-kd2wq\" (UID: \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\") " pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.937654 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/48497e9c-eebe-41d6-8ed3-a9717a43ddae-node-mnt\") pod \"crc-storage-crc-kd2wq\" (UID: \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\") " pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.937702 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26hwk\" (UniqueName: 
\"kubernetes.io/projected/48497e9c-eebe-41d6-8ed3-a9717a43ddae-kube-api-access-26hwk\") pod \"crc-storage-crc-kd2wq\" (UID: \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\") " pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.938275 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/48497e9c-eebe-41d6-8ed3-a9717a43ddae-node-mnt\") pod \"crc-storage-crc-kd2wq\" (UID: \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\") " pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.938726 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/48497e9c-eebe-41d6-8ed3-a9717a43ddae-crc-storage\") pod \"crc-storage-crc-kd2wq\" (UID: \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\") " pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:01 crc kubenswrapper[4941]: I1130 06:58:01.972955 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26hwk\" (UniqueName: \"kubernetes.io/projected/48497e9c-eebe-41d6-8ed3-a9717a43ddae-kube-api-access-26hwk\") pod \"crc-storage-crc-kd2wq\" (UID: \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\") " pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:02 crc kubenswrapper[4941]: I1130 06:58:02.030727 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:02 crc kubenswrapper[4941]: I1130 06:58:02.461544 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-kd2wq"] Nov 30 06:58:02 crc kubenswrapper[4941]: I1130 06:58:02.468531 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 06:58:03 crc kubenswrapper[4941]: I1130 06:58:03.121060 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kd2wq" event={"ID":"48497e9c-eebe-41d6-8ed3-a9717a43ddae","Type":"ContainerStarted","Data":"e3f38e8892d098078bd251e059c2ae93acd0b1929b70aff1969b1b8a6746891a"} Nov 30 06:58:04 crc kubenswrapper[4941]: I1130 06:58:04.132579 4941 generic.go:334] "Generic (PLEG): container finished" podID="48497e9c-eebe-41d6-8ed3-a9717a43ddae" containerID="37c1261a13fee2347e5c38fc1d203f431cf5b5e6fcdab49d922d941f419e5f05" exitCode=0 Nov 30 06:58:04 crc kubenswrapper[4941]: I1130 06:58:04.132695 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kd2wq" event={"ID":"48497e9c-eebe-41d6-8ed3-a9717a43ddae","Type":"ContainerDied","Data":"37c1261a13fee2347e5c38fc1d203f431cf5b5e6fcdab49d922d941f419e5f05"} Nov 30 06:58:05 crc kubenswrapper[4941]: I1130 06:58:05.366623 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:05 crc kubenswrapper[4941]: I1130 06:58:05.499096 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26hwk\" (UniqueName: \"kubernetes.io/projected/48497e9c-eebe-41d6-8ed3-a9717a43ddae-kube-api-access-26hwk\") pod \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\" (UID: \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\") " Nov 30 06:58:05 crc kubenswrapper[4941]: I1130 06:58:05.499196 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/48497e9c-eebe-41d6-8ed3-a9717a43ddae-crc-storage\") pod \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\" (UID: \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\") " Nov 30 06:58:05 crc kubenswrapper[4941]: I1130 06:58:05.499270 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/48497e9c-eebe-41d6-8ed3-a9717a43ddae-node-mnt\") pod \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\" (UID: \"48497e9c-eebe-41d6-8ed3-a9717a43ddae\") " Nov 30 06:58:05 crc kubenswrapper[4941]: I1130 06:58:05.499542 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/48497e9c-eebe-41d6-8ed3-a9717a43ddae-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "48497e9c-eebe-41d6-8ed3-a9717a43ddae" (UID: "48497e9c-eebe-41d6-8ed3-a9717a43ddae"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:05 crc kubenswrapper[4941]: I1130 06:58:05.504632 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48497e9c-eebe-41d6-8ed3-a9717a43ddae-kube-api-access-26hwk" (OuterVolumeSpecName: "kube-api-access-26hwk") pod "48497e9c-eebe-41d6-8ed3-a9717a43ddae" (UID: "48497e9c-eebe-41d6-8ed3-a9717a43ddae"). InnerVolumeSpecName "kube-api-access-26hwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:58:05 crc kubenswrapper[4941]: I1130 06:58:05.513803 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48497e9c-eebe-41d6-8ed3-a9717a43ddae-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "48497e9c-eebe-41d6-8ed3-a9717a43ddae" (UID: "48497e9c-eebe-41d6-8ed3-a9717a43ddae"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:58:05 crc kubenswrapper[4941]: I1130 06:58:05.600818 4941 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/48497e9c-eebe-41d6-8ed3-a9717a43ddae-crc-storage\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:05 crc kubenswrapper[4941]: I1130 06:58:05.600860 4941 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/48497e9c-eebe-41d6-8ed3-a9717a43ddae-node-mnt\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:05 crc kubenswrapper[4941]: I1130 06:58:05.600873 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26hwk\" (UniqueName: \"kubernetes.io/projected/48497e9c-eebe-41d6-8ed3-a9717a43ddae-kube-api-access-26hwk\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:06 crc kubenswrapper[4941]: I1130 06:58:06.143661 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kd2wq" event={"ID":"48497e9c-eebe-41d6-8ed3-a9717a43ddae","Type":"ContainerDied","Data":"e3f38e8892d098078bd251e059c2ae93acd0b1929b70aff1969b1b8a6746891a"} Nov 30 06:58:06 crc kubenswrapper[4941]: I1130 06:58:06.143697 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kd2wq" Nov 30 06:58:06 crc kubenswrapper[4941]: I1130 06:58:06.143705 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3f38e8892d098078bd251e059c2ae93acd0b1929b70aff1969b1b8a6746891a" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.175909 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn"] Nov 30 06:58:13 crc kubenswrapper[4941]: E1130 06:58:13.176807 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48497e9c-eebe-41d6-8ed3-a9717a43ddae" containerName="storage" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.176822 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="48497e9c-eebe-41d6-8ed3-a9717a43ddae" containerName="storage" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.176954 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="48497e9c-eebe-41d6-8ed3-a9717a43ddae" containerName="storage" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.177717 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.180358 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.195577 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn"] Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.339527 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktw52\" (UniqueName: \"kubernetes.io/projected/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-kube-api-access-ktw52\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn\" (UID: \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.339754 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn\" (UID: \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.339812 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn\" (UID: \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.441585 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktw52\" (UniqueName: \"kubernetes.io/projected/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-kube-api-access-ktw52\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn\" (UID: \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.441755 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn\" (UID: \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.441811 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn\" (UID: \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.442905 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn\" (UID: \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.442932 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn\" (UID: \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.477530 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktw52\" (UniqueName: \"kubernetes.io/projected/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-kube-api-access-ktw52\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn\" (UID: \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.498159 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.719919 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn"] Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.867576 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-zntd2"] Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.868254 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovn-controller" containerID="cri-o://83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869" gracePeriod=30 Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.868442 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="nbdb" containerID="cri-o://9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a" gracePeriod=30 Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.868591 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="northd" containerID="cri-o://64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9" gracePeriod=30 Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.868316 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3" gracePeriod=30 Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.868377 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="sbdb" 
containerID="cri-o://3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b" gracePeriod=30 Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.868802 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovn-acl-logging" containerID="cri-o://2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56" gracePeriod=30 Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.868843 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="kube-rbac-proxy-node" containerID="cri-o://b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69" gracePeriod=30 Nov 30 06:58:13 crc kubenswrapper[4941]: I1130 06:58:13.906831 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" containerID="cri-o://b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109" gracePeriod=30 Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.148709 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/3.log" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.152137 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovn-acl-logging/0.log" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.152985 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovn-controller/0.log" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.153917 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.212413 4941 generic.go:334] "Generic (PLEG): container finished" podID="990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" containerID="36ae5dd9d38ff79b0f221f32fdc69038f4ddc2a2224ab7219b9da72e897a6d5d" exitCode=0 Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.212544 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" event={"ID":"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e","Type":"ContainerDied","Data":"36ae5dd9d38ff79b0f221f32fdc69038f4ddc2a2224ab7219b9da72e897a6d5d"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.212586 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" event={"ID":"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e","Type":"ContainerStarted","Data":"07a39b392ee4b8f05cefd9f60b71b8ed4b9367577be4efe01f62c23efdf8bd3c"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.222478 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovnkube-controller/3.log" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.228103 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovn-acl-logging/0.log" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.229437 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-zntd2_a6217364-7317-4ee9-957e-9a1764ff0342/ovn-controller/0.log" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230012 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-ns6d8"] Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230089 4941 generic.go:334] "Generic (PLEG): container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109" exitCode=0 Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230127 4941 generic.go:334] "Generic (PLEG): container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b" exitCode=0 Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230142 4941 generic.go:334] "Generic (PLEG): container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a" exitCode=0 Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230160 4941 generic.go:334] "Generic (PLEG): container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9" exitCode=0 Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230177 4941 generic.go:334] "Generic (PLEG): container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3" exitCode=0 Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230190 4941 generic.go:334] "Generic (PLEG): container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69" exitCode=0 Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230206 4941 generic.go:334] "Generic (PLEG): 
container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56" exitCode=143 Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230220 4941 generic.go:334] "Generic (PLEG): container finished" podID="a6217364-7317-4ee9-957e-9a1764ff0342" containerID="83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869" exitCode=143 Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.230377 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230399 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.230416 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230425 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.230439 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="nbdb" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230447 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="nbdb" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.230460 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230468 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.230478 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovn-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230486 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovn-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.230499 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="sbdb" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230506 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="sbdb" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230511 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.230520 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="northd" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230678 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="northd" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.230696 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="kubecfg-setup" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230704 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="kubecfg-setup" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.230718 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovn-acl-logging" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230725 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovn-acl-logging" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.230740 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="kube-rbac-proxy-node" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230748 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="kube-rbac-proxy-node" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.230757 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="kube-rbac-proxy-ovn-metrics" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.230765 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="kube-rbac-proxy-ovn-metrics" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.231613 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovn-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.231652 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.231678 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="nbdb" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.231746 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.231760 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="kube-rbac-proxy-ovn-metrics" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.231779 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="sbdb" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.231799 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="northd" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.231820 4941 
memory_manager.go:354] "RemoveStaleState removing state" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovn-acl-logging" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.231835 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="kube-rbac-proxy-node" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.231857 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.232066 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.232087 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.232105 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.232119 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.232301 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.232731 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6217364-7317-4ee9-957e-9a1764ff0342" containerName="ovnkube-controller" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.236592 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.236677 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.236713 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.236740 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.236768 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.236798 4941 scope.go:117] "RemoveContainer" containerID="b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.236770 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.237915 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.237947 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238253 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238265 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238272 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238282 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238289 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238296 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238302 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238309 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238321 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238334 4941 pod_container_deletor.go:114] "Failed to 
issue the request to remove container" containerID={"Type":"cri-o","ID":"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238347 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238370 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238377 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238384 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238391 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238397 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238404 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238412 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238419 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238429 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238442 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238450 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238460 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238469 4941 pod_container_deletor.go:114] "Failed to 
issue the request to remove container" containerID={"Type":"cri-o","ID":"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238477 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238484 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238491 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238498 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.238505 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239151 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239192 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zntd2" event={"ID":"a6217364-7317-4ee9-957e-9a1764ff0342","Type":"ContainerDied","Data":"8afa66c174f64be3cab1d8a845dec34f66aef255f2e5692b9e1e17ff7301f2c2"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239207 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239224 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239232 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239240 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239248 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239256 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239263 4941 pod_container_deletor.go:114] "Failed to 
issue the request to remove container" containerID={"Type":"cri-o","ID":"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239271 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239279 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239286 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.239740 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vzc7c_a2c22971-565b-44b0-9312-737c3931a558/kube-multus/2.log" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.241159 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vzc7c_a2c22971-565b-44b0-9312-737c3931a558/kube-multus/1.log" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.241197 4941 generic.go:334] "Generic (PLEG): container finished" podID="a2c22971-565b-44b0-9312-737c3931a558" containerID="f4933ca1d46a47b00734deaa91d22c2cfee015918f3ff0d5764ac80ce7d6f0f1" exitCode=2 Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.241226 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vzc7c" event={"ID":"a2c22971-565b-44b0-9312-737c3931a558","Type":"ContainerDied","Data":"f4933ca1d46a47b00734deaa91d22c2cfee015918f3ff0d5764ac80ce7d6f0f1"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.241242 4941 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4"} Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.241826 4941 scope.go:117] "RemoveContainer" containerID="f4933ca1d46a47b00734deaa91d22c2cfee015918f3ff0d5764ac80ce7d6f0f1" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.242041 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-vzc7c_openshift-multus(a2c22971-565b-44b0-9312-737c3931a558)\"" pod="openshift-multus/multus-vzc7c" podUID="a2c22971-565b-44b0-9312-737c3931a558" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.256926 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-slash\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257009 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdvkd\" (UniqueName: \"kubernetes.io/projected/a6217364-7317-4ee9-957e-9a1764ff0342-kube-api-access-kdvkd\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257064 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-env-overrides\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257054 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-slash" (OuterVolumeSpecName: "host-slash") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257095 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-var-lib-openvswitch\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257136 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257197 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-ovnkube-script-lib\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257283 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-kubelet\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257313 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-run-netns\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257383 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-cni-netd\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257421 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-cni-bin\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257438 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: 
"a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257490 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257457 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-ovn\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257523 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257552 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257580 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257605 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-systemd\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257611 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257637 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257670 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-ovnkube-config\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257728 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-node-log\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257759 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-node-log" (OuterVolumeSpecName: "node-log") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257776 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-openvswitch\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257812 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-etc-openvswitch\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257837 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-run-ovn-kubernetes\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257863 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257871 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-log-socket\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257894 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257908 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-var-lib-cni-networks-ovn-kubernetes\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257918 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258002 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258023 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-log-socket" (OuterVolumeSpecName: "log-socket") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258043 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.257971 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-systemd-units\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258151 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a6217364-7317-4ee9-957e-9a1764ff0342-ovn-node-metrics-cert\") pod \"a6217364-7317-4ee9-957e-9a1764ff0342\" (UID: \"a6217364-7317-4ee9-957e-9a1764ff0342\") " Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258201 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258494 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-slash\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258528 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-run-ovn\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258561 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/523102da-8e26-4563-8286-41a900e25fb1-ovnkube-script-lib\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258582 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-run-ovn-kubernetes\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258748 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-cni-bin\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258801 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/523102da-8e26-4563-8286-41a900e25fb1-ovnkube-config\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258827 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-run-systemd\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258852 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-kubelet\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.258903 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.259001 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-var-lib-openvswitch\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.259088 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-systemd-units\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.259115 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-etc-openvswitch\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.259139 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-run-netns\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.259245 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/523102da-8e26-4563-8286-41a900e25fb1-ovn-node-metrics-cert\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.259330 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/523102da-8e26-4563-8286-41a900e25fb1-env-overrides\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.259393 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-node-log\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.259933 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-cni-netd\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.259975 4941 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfpld\" (UniqueName: \"kubernetes.io/projected/523102da-8e26-4563-8286-41a900e25fb1-kube-api-access-vfpld\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260010 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-run-openvswitch\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260067 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-log-socket\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260170 4941 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-systemd-units\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260185 4941 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-slash\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260199 4941 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260212 4941 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260225 4941 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260431 4941 scope.go:117] "RemoveContainer" containerID="f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260736 4941 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-kubelet\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260757 4941 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-run-netns\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260767 4941 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-cni-netd\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260778 4941 
reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-cni-bin\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260792 4941 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260804 4941 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a6217364-7317-4ee9-957e-9a1764ff0342-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260815 4941 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-node-log\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260828 4941 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260841 4941 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260852 4941 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260863 4941 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-log-socket\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.260877 4941 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.264631 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6217364-7317-4ee9-957e-9a1764ff0342-kube-api-access-kdvkd" (OuterVolumeSpecName: "kube-api-access-kdvkd") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "kube-api-access-kdvkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.268585 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6217364-7317-4ee9-957e-9a1764ff0342-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.281536 4941 scope.go:117] "RemoveContainer" containerID="3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.281952 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "a6217364-7317-4ee9-957e-9a1764ff0342" (UID: "a6217364-7317-4ee9-957e-9a1764ff0342"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.304701 4941 scope.go:117] "RemoveContainer" containerID="9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.323163 4941 scope.go:117] "RemoveContainer" containerID="64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.340309 4941 scope.go:117] "RemoveContainer" containerID="0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.355160 4941 scope.go:117] "RemoveContainer" containerID="b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361627 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/523102da-8e26-4563-8286-41a900e25fb1-ovn-node-metrics-cert\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361666 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/523102da-8e26-4563-8286-41a900e25fb1-env-overrides\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361693 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-node-log\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361713 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-cni-netd\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361730 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfpld\" (UniqueName: \"kubernetes.io/projected/523102da-8e26-4563-8286-41a900e25fb1-kube-api-access-vfpld\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361748 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-run-openvswitch\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361768 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-log-socket\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361788 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-slash\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361810 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-run-ovn\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361903 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-node-log\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361934 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-cni-netd\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361947 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-run-openvswitch\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361987 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-slash\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362020 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-run-ovn\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.361855 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/523102da-8e26-4563-8286-41a900e25fb1-ovnkube-script-lib\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362028 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-log-socket\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362057 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-run-ovn-kubernetes\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362114 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-run-ovn-kubernetes\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362084 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-cni-bin\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362167 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/523102da-8e26-4563-8286-41a900e25fb1-ovnkube-config\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362231 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-cni-bin\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362363 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-kubelet\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362368 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-kubelet\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362395 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-run-systemd\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362420 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362491 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-run-systemd\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362512 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362620 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-var-lib-openvswitch\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362875 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/523102da-8e26-4563-8286-41a900e25fb1-ovnkube-config\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362886 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/523102da-8e26-4563-8286-41a900e25fb1-env-overrides\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362913 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-var-lib-openvswitch\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362948 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-etc-openvswitch\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362978 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-etc-openvswitch\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.362986 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-systemd-units\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.363011 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-systemd-units\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.363015 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-run-netns\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.363035 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/523102da-8e26-4563-8286-41a900e25fb1-host-run-netns\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.363090 4941 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a6217364-7317-4ee9-957e-9a1764ff0342-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.363106 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdvkd\" (UniqueName: \"kubernetes.io/projected/a6217364-7317-4ee9-957e-9a1764ff0342-kube-api-access-kdvkd\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.363117 4941 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a6217364-7317-4ee9-957e-9a1764ff0342-run-systemd\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.363274 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/523102da-8e26-4563-8286-41a900e25fb1-ovnkube-script-lib\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.366865 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/523102da-8e26-4563-8286-41a900e25fb1-ovn-node-metrics-cert\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.368376 4941 scope.go:117] "RemoveContainer" containerID="2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.384744 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfpld\" (UniqueName: \"kubernetes.io/projected/523102da-8e26-4563-8286-41a900e25fb1-kube-api-access-vfpld\") pod \"ovnkube-node-ns6d8\" (UID: \"523102da-8e26-4563-8286-41a900e25fb1\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.386083 4941 scope.go:117] "RemoveContainer" containerID="83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.409078 4941 scope.go:117] "RemoveContainer" containerID="97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.424572 4941 scope.go:117] "RemoveContainer" containerID="b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.425186 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109\": container with ID starting with b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109 not found: ID does not exist" containerID="b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.425251 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109"} err="failed to get container status \"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109\": rpc error: code = NotFound desc = could not find container \"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109\": container with ID starting with b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.425293 4941 scope.go:117] "RemoveContainer" containerID="f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.425733 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\": container with ID starting with f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b not found: ID does not exist" containerID="f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.425762 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"} err="failed to get container status \"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\": rpc error: code = NotFound desc = could not find container \"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\": container with ID starting with f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.425780 4941 scope.go:117] "RemoveContainer" containerID="3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.426179 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\": container with ID starting with 3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b not found: ID does not exist" containerID="3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b" Nov 30 06:58:14 crc 
kubenswrapper[4941]: I1130 06:58:14.426426 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b"} err="failed to get container status \"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\": rpc error: code = NotFound desc = could not find container \"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\": container with ID starting with 3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.426461 4941 scope.go:117] "RemoveContainer" containerID="9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.426947 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\": container with ID starting with 9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a not found: ID does not exist" containerID="9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.426967 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a"} err="failed to get container status \"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\": rpc error: code = NotFound desc = could not find container \"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\": container with ID starting with 9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.426987 4941 scope.go:117] "RemoveContainer" containerID="64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.427298 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\": container with ID starting with 64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9 not found: ID does not exist" containerID="64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.427353 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9"} err="failed to get container status \"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\": rpc error: code = NotFound desc = could not find container \"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\": container with ID starting with 64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.427375 4941 scope.go:117] "RemoveContainer" containerID="0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.427706 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\": container with ID starting with 
0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3 not found: ID does not exist" containerID="0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.427733 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3"} err="failed to get container status \"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\": rpc error: code = NotFound desc = could not find container \"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\": container with ID starting with 0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.427754 4941 scope.go:117] "RemoveContainer" containerID="b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.428048 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\": container with ID starting with b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69 not found: ID does not exist" containerID="b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.428079 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69"} err="failed to get container status \"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\": rpc error: code = NotFound desc = could not find container \"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\": container with ID starting with b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.428095 4941 scope.go:117] "RemoveContainer" containerID="2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.428414 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\": container with ID starting with 2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56 not found: ID does not exist" containerID="2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.428454 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56"} err="failed to get container status \"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\": rpc error: code = NotFound desc = could not find container \"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\": container with ID starting with 2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.428474 4941 scope.go:117] "RemoveContainer" containerID="83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.429375 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\": container with ID starting with 83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869 not found: ID does not exist" containerID="83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.429442 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869"} err="failed to get container status \"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\": rpc error: code = NotFound desc = could not find container \"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\": container with ID starting with 83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.429495 4941 scope.go:117] "RemoveContainer" containerID="97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d" Nov 30 06:58:14 crc kubenswrapper[4941]: E1130 06:58:14.430435 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\": container with ID starting with 97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d not found: ID does not exist" containerID="97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.430471 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d"} err="failed to get container status \"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\": rpc error: code = NotFound desc = could not find container \"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\": container with ID starting with 97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.430494 4941 scope.go:117] "RemoveContainer" containerID="b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.431068 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109"} err="failed to get container status \"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109\": rpc error: code = NotFound desc = could not find container \"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109\": container with ID starting with b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.431100 4941 scope.go:117] "RemoveContainer" containerID="f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.431447 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"} err="failed to get container status \"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\": rpc error: code = NotFound desc = could not find container 
\"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\": container with ID starting with f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.431482 4941 scope.go:117] "RemoveContainer" containerID="3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.432459 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b"} err="failed to get container status \"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\": rpc error: code = NotFound desc = could not find container \"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\": container with ID starting with 3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.432480 4941 scope.go:117] "RemoveContainer" containerID="9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.432813 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a"} err="failed to get container status \"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\": rpc error: code = NotFound desc = could not find container \"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\": container with ID starting with 9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.432844 4941 scope.go:117] "RemoveContainer" containerID="64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.433146 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9"} err="failed to get container status \"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\": rpc error: code = NotFound desc = could not find container \"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\": container with ID starting with 64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.433177 4941 scope.go:117] "RemoveContainer" containerID="0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.435030 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3"} err="failed to get container status \"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\": rpc error: code = NotFound desc = could not find container \"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\": container with ID starting with 0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.435062 4941 scope.go:117] "RemoveContainer" containerID="b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.435473 4941 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69"} err="failed to get container status \"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\": rpc error: code = NotFound desc = could not find container \"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\": container with ID starting with b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.435502 4941 scope.go:117] "RemoveContainer" containerID="2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.435842 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56"} err="failed to get container status \"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\": rpc error: code = NotFound desc = could not find container \"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\": container with ID starting with 2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.435873 4941 scope.go:117] "RemoveContainer" containerID="83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.436222 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869"} err="failed to get container status \"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\": rpc error: code = NotFound desc = could not find container \"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\": container with ID starting with 83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.436255 4941 scope.go:117] "RemoveContainer" containerID="97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.436665 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d"} err="failed to get container status \"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\": rpc error: code = NotFound desc = could not find container \"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\": container with ID starting with 97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.436686 4941 scope.go:117] "RemoveContainer" containerID="b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.436976 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109"} err="failed to get container status \"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109\": rpc error: code = NotFound desc = could not find container \"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109\": container with ID starting with 
b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.437046 4941 scope.go:117] "RemoveContainer" containerID="f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.437409 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"} err="failed to get container status \"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\": rpc error: code = NotFound desc = could not find container \"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\": container with ID starting with f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.437437 4941 scope.go:117] "RemoveContainer" containerID="3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.437841 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b"} err="failed to get container status \"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\": rpc error: code = NotFound desc = could not find container \"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\": container with ID starting with 3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.437868 4941 scope.go:117] "RemoveContainer" containerID="9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.438163 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a"} err="failed to get container status \"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\": rpc error: code = NotFound desc = could not find container \"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\": container with ID starting with 9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.438196 4941 scope.go:117] "RemoveContainer" containerID="64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.438598 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9"} err="failed to get container status \"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\": rpc error: code = NotFound desc = could not find container \"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\": container with ID starting with 64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.438624 4941 scope.go:117] "RemoveContainer" containerID="0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.439155 4941 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3"} err="failed to get container status \"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\": rpc error: code = NotFound desc = could not find container \"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\": container with ID starting with 0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.439184 4941 scope.go:117] "RemoveContainer" containerID="b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.439485 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69"} err="failed to get container status \"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\": rpc error: code = NotFound desc = could not find container \"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\": container with ID starting with b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.439518 4941 scope.go:117] "RemoveContainer" containerID="2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.439723 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56"} err="failed to get container status \"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\": rpc error: code = NotFound desc = could not find container \"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\": container with ID starting with 2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.439747 4941 scope.go:117] "RemoveContainer" containerID="83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.439962 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869"} err="failed to get container status \"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\": rpc error: code = NotFound desc = could not find container \"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\": container with ID starting with 83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.439984 4941 scope.go:117] "RemoveContainer" containerID="97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.440301 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d"} err="failed to get container status \"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\": rpc error: code = NotFound desc = could not find container \"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\": container with ID starting with 97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d not found: ID does not exist" Nov 
30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.440359 4941 scope.go:117] "RemoveContainer" containerID="b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.440735 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109"} err="failed to get container status \"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109\": rpc error: code = NotFound desc = could not find container \"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109\": container with ID starting with b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.440767 4941 scope.go:117] "RemoveContainer" containerID="f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.441189 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b"} err="failed to get container status \"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\": rpc error: code = NotFound desc = could not find container \"f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b\": container with ID starting with f715a01b87235ae23a9658b188be608f40ab3f27c302dfe23ecceacafd3bb36b not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.441216 4941 scope.go:117] "RemoveContainer" containerID="3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.441741 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b"} err="failed to get container status \"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\": rpc error: code = NotFound desc = could not find container \"3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b\": container with ID starting with 3bd0bdb824602a546f3b808e6c8258046de49c928cab692c21d15fee5098368b not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.441765 4941 scope.go:117] "RemoveContainer" containerID="9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.442016 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a"} err="failed to get container status \"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\": rpc error: code = NotFound desc = could not find container \"9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a\": container with ID starting with 9f229961c2703dd9ec9e969cd268be885ba1a49c46dccd89f0e440878b5c024a not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.442042 4941 scope.go:117] "RemoveContainer" containerID="64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.442287 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9"} err="failed to get container status 
\"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\": rpc error: code = NotFound desc = could not find container \"64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9\": container with ID starting with 64d00ab1a68d9fb7df8adf815e29cf2cffaad94045c550c6a3f5020b95760df9 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.442324 4941 scope.go:117] "RemoveContainer" containerID="0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.442630 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3"} err="failed to get container status \"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\": rpc error: code = NotFound desc = could not find container \"0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3\": container with ID starting with 0cc7cc4c874f70a703f5f66b96f291f5ee9eaa1321ae82cf99e562489df758b3 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.442664 4941 scope.go:117] "RemoveContainer" containerID="b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.442961 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69"} err="failed to get container status \"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\": rpc error: code = NotFound desc = could not find container \"b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69\": container with ID starting with b29d739f78086b37a73b18117fb532eb5f23bfebf19fc2dce30b17b343cf5f69 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.442984 4941 scope.go:117] "RemoveContainer" containerID="2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.443206 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56"} err="failed to get container status \"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\": rpc error: code = NotFound desc = could not find container \"2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56\": container with ID starting with 2ae9e26997e428dc197ff049a76e96b7e4450accded13eae3a38523d9c17fd56 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.443228 4941 scope.go:117] "RemoveContainer" containerID="83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.443427 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869"} err="failed to get container status \"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\": rpc error: code = NotFound desc = could not find container \"83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869\": container with ID starting with 83da119cced9e27e72e274e3a005eb7743085f97931e4fddfed84cb16ecdd869 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.443446 4941 scope.go:117] "RemoveContainer" 
containerID="97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.443835 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d"} err="failed to get container status \"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\": rpc error: code = NotFound desc = could not find container \"97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d\": container with ID starting with 97d56112c1b339f582dfee04e67605cbc0ffc3f67645e384299a05f39def991d not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.443857 4941 scope.go:117] "RemoveContainer" containerID="b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.444112 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109"} err="failed to get container status \"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109\": rpc error: code = NotFound desc = could not find container \"b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109\": container with ID starting with b2c236a2d73622ff0dbcb33ebb1da07d0ee6b4aa717758f151b0cbef2a26e109 not found: ID does not exist" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.556405 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.559533 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-zntd2"] Nov 30 06:58:14 crc kubenswrapper[4941]: I1130 06:58:14.570819 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-zntd2"] Nov 30 06:58:14 crc kubenswrapper[4941]: W1130 06:58:14.592406 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod523102da_8e26_4563_8286_41a900e25fb1.slice/crio-7df0956c5e0f5361d3a3bb5fedb338179a727ac2e559f32eadbf353d48094a35 WatchSource:0}: Error finding container 7df0956c5e0f5361d3a3bb5fedb338179a727ac2e559f32eadbf353d48094a35: Status 404 returned error can't find the container with id 7df0956c5e0f5361d3a3bb5fedb338179a727ac2e559f32eadbf353d48094a35 Nov 30 06:58:15 crc kubenswrapper[4941]: I1130 06:58:15.253161 4941 generic.go:334] "Generic (PLEG): container finished" podID="523102da-8e26-4563-8286-41a900e25fb1" containerID="a254a6c6ed64160f92f58065a80a4b9cadcbd965630ee88bbc657cfbfab0946d" exitCode=0 Nov 30 06:58:15 crc kubenswrapper[4941]: I1130 06:58:15.253259 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" event={"ID":"523102da-8e26-4563-8286-41a900e25fb1","Type":"ContainerDied","Data":"a254a6c6ed64160f92f58065a80a4b9cadcbd965630ee88bbc657cfbfab0946d"} Nov 30 06:58:15 crc kubenswrapper[4941]: I1130 06:58:15.253292 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" event={"ID":"523102da-8e26-4563-8286-41a900e25fb1","Type":"ContainerStarted","Data":"7df0956c5e0f5361d3a3bb5fedb338179a727ac2e559f32eadbf353d48094a35"} Nov 30 06:58:15 crc kubenswrapper[4941]: I1130 06:58:15.529396 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="a6217364-7317-4ee9-957e-9a1764ff0342" path="/var/lib/kubelet/pods/a6217364-7317-4ee9-957e-9a1764ff0342/volumes" Nov 30 06:58:16 crc kubenswrapper[4941]: I1130 06:58:16.261914 4941 generic.go:334] "Generic (PLEG): container finished" podID="990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" containerID="d777957b6d1b7e4f8aeeef210dba8e736df953948f01f6f21def5eb95cf63d3e" exitCode=0 Nov 30 06:58:16 crc kubenswrapper[4941]: I1130 06:58:16.261993 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" event={"ID":"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e","Type":"ContainerDied","Data":"d777957b6d1b7e4f8aeeef210dba8e736df953948f01f6f21def5eb95cf63d3e"} Nov 30 06:58:16 crc kubenswrapper[4941]: I1130 06:58:16.267265 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" event={"ID":"523102da-8e26-4563-8286-41a900e25fb1","Type":"ContainerStarted","Data":"b53fd75caf88acfbdf1192211610f61b005cbe48d09d2983c8b25e30ee911d6e"} Nov 30 06:58:16 crc kubenswrapper[4941]: I1130 06:58:16.267354 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" event={"ID":"523102da-8e26-4563-8286-41a900e25fb1","Type":"ContainerStarted","Data":"12965699bf0b59e43162324f5e1685fd0e0d2d9a9d8075ac2e17471eabb60126"} Nov 30 06:58:16 crc kubenswrapper[4941]: I1130 06:58:16.267367 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" event={"ID":"523102da-8e26-4563-8286-41a900e25fb1","Type":"ContainerStarted","Data":"2bff6d27a4a83845b6a90573f47013b59d0a3b4c39d27bbc6e92f28c489130ca"} Nov 30 06:58:16 crc kubenswrapper[4941]: I1130 06:58:16.267378 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" event={"ID":"523102da-8e26-4563-8286-41a900e25fb1","Type":"ContainerStarted","Data":"5810ff822b5db49f51e946f5f9871d356e363088ecfc7417d6a6f2cc4cbd3b3a"} Nov 30 06:58:16 crc kubenswrapper[4941]: I1130 06:58:16.267389 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" event={"ID":"523102da-8e26-4563-8286-41a900e25fb1","Type":"ContainerStarted","Data":"24c5de62fed367aa589add15d8b5eb4946191322ff22a01bf4bcedf63a791d10"} Nov 30 06:58:16 crc kubenswrapper[4941]: I1130 06:58:16.267400 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" event={"ID":"523102da-8e26-4563-8286-41a900e25fb1","Type":"ContainerStarted","Data":"64c95c7c844b94ec5d5e34bea804099e47771d1eecfb8cd780d603af4751f9f1"} Nov 30 06:58:17 crc kubenswrapper[4941]: I1130 06:58:17.275831 4941 generic.go:334] "Generic (PLEG): container finished" podID="990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" containerID="f91e465cfbe73a7f37beed02dd2da10a64eaaee939ac26d447ad80e9838aba11" exitCode=0 Nov 30 06:58:17 crc kubenswrapper[4941]: I1130 06:58:17.275897 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" event={"ID":"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e","Type":"ContainerDied","Data":"f91e465cfbe73a7f37beed02dd2da10a64eaaee939ac26d447ad80e9838aba11"} Nov 30 06:58:18 crc kubenswrapper[4941]: I1130 06:58:18.381435 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:18 crc kubenswrapper[4941]: I1130 06:58:18.518661 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-util\") pod \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\" (UID: \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\") " Nov 30 06:58:18 crc kubenswrapper[4941]: I1130 06:58:18.519363 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-bundle\") pod \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\" (UID: \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\") " Nov 30 06:58:18 crc kubenswrapper[4941]: I1130 06:58:18.519445 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktw52\" (UniqueName: \"kubernetes.io/projected/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-kube-api-access-ktw52\") pod \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\" (UID: \"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e\") " Nov 30 06:58:18 crc kubenswrapper[4941]: I1130 06:58:18.521483 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-bundle" (OuterVolumeSpecName: "bundle") pod "990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" (UID: "990f638a-8f13-4eaa-8eeb-6f7d1f3f032e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:58:18 crc kubenswrapper[4941]: I1130 06:58:18.528890 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-kube-api-access-ktw52" (OuterVolumeSpecName: "kube-api-access-ktw52") pod "990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" (UID: "990f638a-8f13-4eaa-8eeb-6f7d1f3f032e"). InnerVolumeSpecName "kube-api-access-ktw52". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:58:18 crc kubenswrapper[4941]: I1130 06:58:18.533602 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-util" (OuterVolumeSpecName: "util") pod "990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" (UID: "990f638a-8f13-4eaa-8eeb-6f7d1f3f032e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:58:18 crc kubenswrapper[4941]: I1130 06:58:18.620331 4941 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:18 crc kubenswrapper[4941]: I1130 06:58:18.620594 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktw52\" (UniqueName: \"kubernetes.io/projected/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-kube-api-access-ktw52\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:18 crc kubenswrapper[4941]: I1130 06:58:18.621458 4941 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/990f638a-8f13-4eaa-8eeb-6f7d1f3f032e-util\") on node \"crc\" DevicePath \"\"" Nov 30 06:58:19 crc kubenswrapper[4941]: I1130 06:58:19.305510 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" event={"ID":"523102da-8e26-4563-8286-41a900e25fb1","Type":"ContainerStarted","Data":"38ae9bb2df6cdde2aa13082ab210d20bde05d822aeb4ffd4ef96ddc9573cdb0e"} Nov 30 06:58:19 crc kubenswrapper[4941]: I1130 06:58:19.310286 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" event={"ID":"990f638a-8f13-4eaa-8eeb-6f7d1f3f032e","Type":"ContainerDied","Data":"07a39b392ee4b8f05cefd9f60b71b8ed4b9367577be4efe01f62c23efdf8bd3c"} Nov 30 06:58:19 crc kubenswrapper[4941]: I1130 06:58:19.310376 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="07a39b392ee4b8f05cefd9f60b71b8ed4b9367577be4efe01f62c23efdf8bd3c" Nov 30 06:58:19 crc kubenswrapper[4941]: I1130 06:58:19.310410 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn" Nov 30 06:58:19 crc kubenswrapper[4941]: I1130 06:58:19.853512 4941 scope.go:117] "RemoveContainer" containerID="72c76c86ebb6e8bb415f9cf7634c48ed75acfe3fda942ca35e3bf70e49ed0ac4" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.319612 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vzc7c_a2c22971-565b-44b0-9312-737c3931a558/kube-multus/2.log" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.539949 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt"] Nov 30 06:58:20 crc kubenswrapper[4941]: E1130 06:58:20.540701 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" containerName="util" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.540720 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" containerName="util" Nov 30 06:58:20 crc kubenswrapper[4941]: E1130 06:58:20.540736 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" containerName="pull" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.540742 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" containerName="pull" Nov 30 06:58:20 crc kubenswrapper[4941]: E1130 06:58:20.540750 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" containerName="extract" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.540757 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" containerName="extract" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.540878 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="990f638a-8f13-4eaa-8eeb-6f7d1f3f032e" containerName="extract" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.541250 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.543566 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-pw6l7" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.543677 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.550947 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.564955 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7b8n\" (UniqueName: \"kubernetes.io/projected/abcd3017-8b84-429b-8f2b-aa2137964cb6-kube-api-access-d7b8n\") pod \"nmstate-operator-5b5b58f5c8-vmmqt\" (UID: \"abcd3017-8b84-429b-8f2b-aa2137964cb6\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.666830 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7b8n\" (UniqueName: \"kubernetes.io/projected/abcd3017-8b84-429b-8f2b-aa2137964cb6-kube-api-access-d7b8n\") pod \"nmstate-operator-5b5b58f5c8-vmmqt\" (UID: \"abcd3017-8b84-429b-8f2b-aa2137964cb6\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.704133 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7b8n\" (UniqueName: \"kubernetes.io/projected/abcd3017-8b84-429b-8f2b-aa2137964cb6-kube-api-access-d7b8n\") pod \"nmstate-operator-5b5b58f5c8-vmmqt\" (UID: \"abcd3017-8b84-429b-8f2b-aa2137964cb6\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:20 crc kubenswrapper[4941]: I1130 06:58:20.866396 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:20 crc kubenswrapper[4941]: E1130 06:58:20.910666 4941 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate_abcd3017-8b84-429b-8f2b-aa2137964cb6_0(e073beb8758405c27ba115b9dcaaff7220292d5b174d1bceead61be5fd72e8a9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 30 06:58:20 crc kubenswrapper[4941]: E1130 06:58:20.910749 4941 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate_abcd3017-8b84-429b-8f2b-aa2137964cb6_0(e073beb8758405c27ba115b9dcaaff7220292d5b174d1bceead61be5fd72e8a9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:20 crc kubenswrapper[4941]: E1130 06:58:20.910771 4941 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate_abcd3017-8b84-429b-8f2b-aa2137964cb6_0(e073beb8758405c27ba115b9dcaaff7220292d5b174d1bceead61be5fd72e8a9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:20 crc kubenswrapper[4941]: E1130 06:58:20.910824 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate(abcd3017-8b84-429b-8f2b-aa2137964cb6)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate(abcd3017-8b84-429b-8f2b-aa2137964cb6)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate_abcd3017-8b84-429b-8f2b-aa2137964cb6_0(e073beb8758405c27ba115b9dcaaff7220292d5b174d1bceead61be5fd72e8a9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" podUID="abcd3017-8b84-429b-8f2b-aa2137964cb6" Nov 30 06:58:21 crc kubenswrapper[4941]: I1130 06:58:21.327772 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt"] Nov 30 06:58:21 crc kubenswrapper[4941]: I1130 06:58:21.332025 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:21 crc kubenswrapper[4941]: I1130 06:58:21.332396 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" event={"ID":"523102da-8e26-4563-8286-41a900e25fb1","Type":"ContainerStarted","Data":"823f42dd665fe1059f2d9aef311aa3ae446d93d6235a72e45f9a8ab46088eaf8"} Nov 30 06:58:21 crc kubenswrapper[4941]: I1130 06:58:21.332687 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:21 crc kubenswrapper[4941]: I1130 06:58:21.332718 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:21 crc kubenswrapper[4941]: I1130 06:58:21.332733 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:21 crc kubenswrapper[4941]: I1130 06:58:21.332805 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:21 crc kubenswrapper[4941]: I1130 06:58:21.370911 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" podStartSLOduration=7.370874826 podStartE2EDuration="7.370874826s" podCreationTimestamp="2025-11-30 06:58:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:58:21.369299987 +0000 UTC m=+722.137471606" watchObservedRunningTime="2025-11-30 06:58:21.370874826 +0000 UTC m=+722.139046455" Nov 30 06:58:21 crc kubenswrapper[4941]: I1130 06:58:21.380854 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:21 crc kubenswrapper[4941]: I1130 06:58:21.383144 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:21 crc kubenswrapper[4941]: E1130 06:58:21.393094 4941 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate_abcd3017-8b84-429b-8f2b-aa2137964cb6_0(b5ae1dc43b54fd97e545d10146770878b0178a70d8df627a6ef183484b003e19): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 30 06:58:21 crc kubenswrapper[4941]: E1130 06:58:21.393197 4941 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate_abcd3017-8b84-429b-8f2b-aa2137964cb6_0(b5ae1dc43b54fd97e545d10146770878b0178a70d8df627a6ef183484b003e19): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:21 crc kubenswrapper[4941]: E1130 06:58:21.393241 4941 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate_abcd3017-8b84-429b-8f2b-aa2137964cb6_0(b5ae1dc43b54fd97e545d10146770878b0178a70d8df627a6ef183484b003e19): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:21 crc kubenswrapper[4941]: E1130 06:58:21.393313 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate(abcd3017-8b84-429b-8f2b-aa2137964cb6)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate(abcd3017-8b84-429b-8f2b-aa2137964cb6)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate_abcd3017-8b84-429b-8f2b-aa2137964cb6_0(b5ae1dc43b54fd97e545d10146770878b0178a70d8df627a6ef183484b003e19): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" podUID="abcd3017-8b84-429b-8f2b-aa2137964cb6" Nov 30 06:58:28 crc kubenswrapper[4941]: I1130 06:58:28.521388 4941 scope.go:117] "RemoveContainer" containerID="f4933ca1d46a47b00734deaa91d22c2cfee015918f3ff0d5764ac80ce7d6f0f1" Nov 30 06:58:28 crc kubenswrapper[4941]: E1130 06:58:28.522898 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-vzc7c_openshift-multus(a2c22971-565b-44b0-9312-737c3931a558)\"" pod="openshift-multus/multus-vzc7c" podUID="a2c22971-565b-44b0-9312-737c3931a558" Nov 30 06:58:32 crc kubenswrapper[4941]: I1130 06:58:32.521589 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:32 crc kubenswrapper[4941]: I1130 06:58:32.522096 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:32 crc kubenswrapper[4941]: E1130 06:58:32.573896 4941 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate_abcd3017-8b84-429b-8f2b-aa2137964cb6_0(2fd149c95074dbd41c0c9c9de84a70fc0bdcf77d4a53fe8165a2a89399de141e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 30 06:58:32 crc kubenswrapper[4941]: E1130 06:58:32.574474 4941 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate_abcd3017-8b84-429b-8f2b-aa2137964cb6_0(2fd149c95074dbd41c0c9c9de84a70fc0bdcf77d4a53fe8165a2a89399de141e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:32 crc kubenswrapper[4941]: E1130 06:58:32.574501 4941 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate_abcd3017-8b84-429b-8f2b-aa2137964cb6_0(2fd149c95074dbd41c0c9c9de84a70fc0bdcf77d4a53fe8165a2a89399de141e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:32 crc kubenswrapper[4941]: E1130 06:58:32.574551 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate(abcd3017-8b84-429b-8f2b-aa2137964cb6)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate(abcd3017-8b84-429b-8f2b-aa2137964cb6)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-5b5b58f5c8-vmmqt_openshift-nmstate_abcd3017-8b84-429b-8f2b-aa2137964cb6_0(2fd149c95074dbd41c0c9c9de84a70fc0bdcf77d4a53fe8165a2a89399de141e): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" podUID="abcd3017-8b84-429b-8f2b-aa2137964cb6" Nov 30 06:58:39 crc kubenswrapper[4941]: I1130 06:58:39.527116 4941 scope.go:117] "RemoveContainer" containerID="f4933ca1d46a47b00734deaa91d22c2cfee015918f3ff0d5764ac80ce7d6f0f1" Nov 30 06:58:40 crc kubenswrapper[4941]: I1130 06:58:40.458073 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vzc7c_a2c22971-565b-44b0-9312-737c3931a558/kube-multus/2.log" Nov 30 06:58:40 crc kubenswrapper[4941]: I1130 06:58:40.458661 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vzc7c" event={"ID":"a2c22971-565b-44b0-9312-737c3931a558","Type":"ContainerStarted","Data":"7e341c85a10421a254a92fefa3a2e88cf89bdd6a8420a50c00b2b3badee2c8c7"} Nov 30 06:58:44 crc kubenswrapper[4941]: I1130 06:58:44.588086 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-ns6d8" Nov 30 06:58:47 crc kubenswrapper[4941]: I1130 06:58:47.521277 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:47 crc kubenswrapper[4941]: I1130 06:58:47.523144 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" Nov 30 06:58:47 crc kubenswrapper[4941]: I1130 06:58:47.721826 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt"] Nov 30 06:58:48 crc kubenswrapper[4941]: I1130 06:58:48.509397 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" event={"ID":"abcd3017-8b84-429b-8f2b-aa2137964cb6","Type":"ContainerStarted","Data":"c0cced9cebf8d1399facddf5c8be29ceaeae3243b8500d8be9f54a94289e2ca3"} Nov 30 06:58:50 crc kubenswrapper[4941]: I1130 06:58:50.520142 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" event={"ID":"abcd3017-8b84-429b-8f2b-aa2137964cb6","Type":"ContainerStarted","Data":"f8fa5681e992a84638a2ff69e01fadd134355c3d2d5381e47edf09bde90082d6"} Nov 30 06:58:50 crc kubenswrapper[4941]: I1130 06:58:50.536263 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-vmmqt" podStartSLOduration=28.485957433 podStartE2EDuration="30.536244819s" podCreationTimestamp="2025-11-30 06:58:20 +0000 UTC" firstStartedPulling="2025-11-30 06:58:47.730478637 +0000 UTC m=+748.498650266" lastFinishedPulling="2025-11-30 06:58:49.780766043 +0000 UTC m=+750.548937652" observedRunningTime="2025-11-30 06:58:50.536168547 +0000 UTC m=+751.304340156" watchObservedRunningTime="2025-11-30 06:58:50.536244819 +0000 UTC m=+751.304416428" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.616244 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-vnjxd"] Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.617280 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-vnjxd" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.620770 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h"] Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.621731 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.622516 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-rwnss" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.624884 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.665386 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-5t7sr"] Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.667164 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.680811 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-vnjxd"] Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.691542 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h"] Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.742287 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt"] Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.742921 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.744969 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.745132 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-cj2q5" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.745227 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.756754 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt"] Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.777412 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gpvq\" (UniqueName: \"kubernetes.io/projected/1d0ef5c7-2b43-4e45-979d-de23cce56371-kube-api-access-7gpvq\") pod \"nmstate-webhook-5f6d4c5ccb-w527h\" (UID: \"1d0ef5c7-2b43-4e45-979d-de23cce56371\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.777453 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx8x6\" (UniqueName: \"kubernetes.io/projected/d958a2dd-c486-4357-8ac9-a808e3474e65-kube-api-access-xx8x6\") pod \"nmstate-metrics-7f946cbc9-vnjxd\" (UID: \"d958a2dd-c486-4357-8ac9-a808e3474e65\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-vnjxd" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.777478 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/be9045de-8bc2-4342-bb17-345c07c16d74-nmstate-lock\") pod \"nmstate-handler-5t7sr\" (UID: \"be9045de-8bc2-4342-bb17-345c07c16d74\") " pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 
06:58:51.777499 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92xv5\" (UniqueName: \"kubernetes.io/projected/be9045de-8bc2-4342-bb17-345c07c16d74-kube-api-access-92xv5\") pod \"nmstate-handler-5t7sr\" (UID: \"be9045de-8bc2-4342-bb17-345c07c16d74\") " pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.777516 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/be9045de-8bc2-4342-bb17-345c07c16d74-ovs-socket\") pod \"nmstate-handler-5t7sr\" (UID: \"be9045de-8bc2-4342-bb17-345c07c16d74\") " pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.777537 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/1d0ef5c7-2b43-4e45-979d-de23cce56371-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-w527h\" (UID: \"1d0ef5c7-2b43-4e45-979d-de23cce56371\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.777563 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/be9045de-8bc2-4342-bb17-345c07c16d74-dbus-socket\") pod \"nmstate-handler-5t7sr\" (UID: \"be9045de-8bc2-4342-bb17-345c07c16d74\") " pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.878392 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4rcd\" (UniqueName: \"kubernetes.io/projected/5b760607-de49-4963-b72b-5bc4ff0f41b7-kube-api-access-s4rcd\") pod \"nmstate-console-plugin-7fbb5f6569-f7frt\" (UID: \"5b760607-de49-4963-b72b-5bc4ff0f41b7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.878446 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5b760607-de49-4963-b72b-5bc4ff0f41b7-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-f7frt\" (UID: \"5b760607-de49-4963-b72b-5bc4ff0f41b7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.878505 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gpvq\" (UniqueName: \"kubernetes.io/projected/1d0ef5c7-2b43-4e45-979d-de23cce56371-kube-api-access-7gpvq\") pod \"nmstate-webhook-5f6d4c5ccb-w527h\" (UID: \"1d0ef5c7-2b43-4e45-979d-de23cce56371\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.878533 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx8x6\" (UniqueName: \"kubernetes.io/projected/d958a2dd-c486-4357-8ac9-a808e3474e65-kube-api-access-xx8x6\") pod \"nmstate-metrics-7f946cbc9-vnjxd\" (UID: \"d958a2dd-c486-4357-8ac9-a808e3474e65\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-vnjxd" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.878567 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/be9045de-8bc2-4342-bb17-345c07c16d74-nmstate-lock\") pod 
\"nmstate-handler-5t7sr\" (UID: \"be9045de-8bc2-4342-bb17-345c07c16d74\") " pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.878609 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/be9045de-8bc2-4342-bb17-345c07c16d74-nmstate-lock\") pod \"nmstate-handler-5t7sr\" (UID: \"be9045de-8bc2-4342-bb17-345c07c16d74\") " pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.878686 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92xv5\" (UniqueName: \"kubernetes.io/projected/be9045de-8bc2-4342-bb17-345c07c16d74-kube-api-access-92xv5\") pod \"nmstate-handler-5t7sr\" (UID: \"be9045de-8bc2-4342-bb17-345c07c16d74\") " pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.878743 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/be9045de-8bc2-4342-bb17-345c07c16d74-ovs-socket\") pod \"nmstate-handler-5t7sr\" (UID: \"be9045de-8bc2-4342-bb17-345c07c16d74\") " pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.878807 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/1d0ef5c7-2b43-4e45-979d-de23cce56371-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-w527h\" (UID: \"1d0ef5c7-2b43-4e45-979d-de23cce56371\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.878864 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/be9045de-8bc2-4342-bb17-345c07c16d74-ovs-socket\") pod \"nmstate-handler-5t7sr\" (UID: \"be9045de-8bc2-4342-bb17-345c07c16d74\") " pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.878881 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/be9045de-8bc2-4342-bb17-345c07c16d74-dbus-socket\") pod \"nmstate-handler-5t7sr\" (UID: \"be9045de-8bc2-4342-bb17-345c07c16d74\") " pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.878931 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5b760607-de49-4963-b72b-5bc4ff0f41b7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-f7frt\" (UID: \"5b760607-de49-4963-b72b-5bc4ff0f41b7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" Nov 30 06:58:51 crc kubenswrapper[4941]: E1130 06:58:51.878978 4941 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Nov 30 06:58:51 crc kubenswrapper[4941]: E1130 06:58:51.879043 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1d0ef5c7-2b43-4e45-979d-de23cce56371-tls-key-pair podName:1d0ef5c7-2b43-4e45-979d-de23cce56371 nodeName:}" failed. No retries permitted until 2025-11-30 06:58:52.379023284 +0000 UTC m=+753.147194903 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/1d0ef5c7-2b43-4e45-979d-de23cce56371-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-w527h" (UID: "1d0ef5c7-2b43-4e45-979d-de23cce56371") : secret "openshift-nmstate-webhook" not found Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.879188 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/be9045de-8bc2-4342-bb17-345c07c16d74-dbus-socket\") pod \"nmstate-handler-5t7sr\" (UID: \"be9045de-8bc2-4342-bb17-345c07c16d74\") " pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.901063 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92xv5\" (UniqueName: \"kubernetes.io/projected/be9045de-8bc2-4342-bb17-345c07c16d74-kube-api-access-92xv5\") pod \"nmstate-handler-5t7sr\" (UID: \"be9045de-8bc2-4342-bb17-345c07c16d74\") " pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.907274 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx8x6\" (UniqueName: \"kubernetes.io/projected/d958a2dd-c486-4357-8ac9-a808e3474e65-kube-api-access-xx8x6\") pod \"nmstate-metrics-7f946cbc9-vnjxd\" (UID: \"d958a2dd-c486-4357-8ac9-a808e3474e65\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-vnjxd" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.917593 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gpvq\" (UniqueName: \"kubernetes.io/projected/1d0ef5c7-2b43-4e45-979d-de23cce56371-kube-api-access-7gpvq\") pod \"nmstate-webhook-5f6d4c5ccb-w527h\" (UID: \"1d0ef5c7-2b43-4e45-979d-de23cce56371\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.925801 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5b666dd7f6-2wdd4"] Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.926730 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.940461 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-vnjxd" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.945775 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5b666dd7f6-2wdd4"] Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.981015 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5b760607-de49-4963-b72b-5bc4ff0f41b7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-f7frt\" (UID: \"5b760607-de49-4963-b72b-5bc4ff0f41b7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.981078 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4rcd\" (UniqueName: \"kubernetes.io/projected/5b760607-de49-4963-b72b-5bc4ff0f41b7-kube-api-access-s4rcd\") pod \"nmstate-console-plugin-7fbb5f6569-f7frt\" (UID: \"5b760607-de49-4963-b72b-5bc4ff0f41b7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.981099 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5b760607-de49-4963-b72b-5bc4ff0f41b7-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-f7frt\" (UID: \"5b760607-de49-4963-b72b-5bc4ff0f41b7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.984954 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/5b760607-de49-4963-b72b-5bc4ff0f41b7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-f7frt\" (UID: \"5b760607-de49-4963-b72b-5bc4ff0f41b7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.992820 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.994592 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5b760607-de49-4963-b72b-5bc4ff0f41b7-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-f7frt\" (UID: \"5b760607-de49-4963-b72b-5bc4ff0f41b7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" Nov 30 06:58:51 crc kubenswrapper[4941]: I1130 06:58:51.997452 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4rcd\" (UniqueName: \"kubernetes.io/projected/5b760607-de49-4963-b72b-5bc4ff0f41b7-kube-api-access-s4rcd\") pod \"nmstate-console-plugin-7fbb5f6569-f7frt\" (UID: \"5b760607-de49-4963-b72b-5bc4ff0f41b7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.062215 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.081761 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-console-serving-cert\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.081807 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-service-ca\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.081885 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-console-oauth-config\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.081915 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-console-config\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.081937 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-trusted-ca-bundle\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.081970 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-oauth-serving-cert\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.081994 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhvxw\" (UniqueName: \"kubernetes.io/projected/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-kube-api-access-dhvxw\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.156744 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-vnjxd"] Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.183300 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhvxw\" (UniqueName: \"kubernetes.io/projected/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-kube-api-access-dhvxw\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " 
pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.183373 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-console-serving-cert\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.183397 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-service-ca\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.183473 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-console-oauth-config\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.183497 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-console-config\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.183514 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-trusted-ca-bundle\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.183538 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-oauth-serving-cert\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.185372 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-service-ca\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.186524 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-console-config\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.186742 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-oauth-serving-cert\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 
30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.186931 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-trusted-ca-bundle\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.189114 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-console-oauth-config\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.189160 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-console-serving-cert\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.197816 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhvxw\" (UniqueName: \"kubernetes.io/projected/cf843aaf-b3d1-41f0-89e9-b3b4b519f08a-kube-api-access-dhvxw\") pod \"console-5b666dd7f6-2wdd4\" (UID: \"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a\") " pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.262922 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt"] Nov 30 06:58:52 crc kubenswrapper[4941]: W1130 06:58:52.267697 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b760607_de49_4963_b72b_5bc4ff0f41b7.slice/crio-3b1abcb96e024a630db86628183890c7bb30d57be701d9d72bc1c720c19bc789 WatchSource:0}: Error finding container 3b1abcb96e024a630db86628183890c7bb30d57be701d9d72bc1c720c19bc789: Status 404 returned error can't find the container with id 3b1abcb96e024a630db86628183890c7bb30d57be701d9d72bc1c720c19bc789 Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.318199 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.386338 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/1d0ef5c7-2b43-4e45-979d-de23cce56371-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-w527h\" (UID: \"1d0ef5c7-2b43-4e45-979d-de23cce56371\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.391015 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/1d0ef5c7-2b43-4e45-979d-de23cce56371-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-w527h\" (UID: \"1d0ef5c7-2b43-4e45-979d-de23cce56371\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.491175 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5b666dd7f6-2wdd4"] Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.530172 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-vnjxd" event={"ID":"d958a2dd-c486-4357-8ac9-a808e3474e65","Type":"ContainerStarted","Data":"ee5a5f78c74ee17e4c0f5cb8d7df668f83ed25a3fb7b66a37dab45af5e9c680f"} Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.531473 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5b666dd7f6-2wdd4" event={"ID":"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a","Type":"ContainerStarted","Data":"0eda700b45a23b7ebe5d4699afee24d202d30401e5fa83f5b98a483bfb74f952"} Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.532790 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-5t7sr" event={"ID":"be9045de-8bc2-4342-bb17-345c07c16d74","Type":"ContainerStarted","Data":"c3963ac63061142140a69c7ee3233fb3493d0ff0a9c20bfeb39beaf60d9b804e"} Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.533796 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" event={"ID":"5b760607-de49-4963-b72b-5bc4ff0f41b7","Type":"ContainerStarted","Data":"3b1abcb96e024a630db86628183890c7bb30d57be701d9d72bc1c720c19bc789"} Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.560128 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" Nov 30 06:58:52 crc kubenswrapper[4941]: I1130 06:58:52.753561 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h"] Nov 30 06:58:53 crc kubenswrapper[4941]: I1130 06:58:53.540941 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5b666dd7f6-2wdd4" event={"ID":"cf843aaf-b3d1-41f0-89e9-b3b4b519f08a","Type":"ContainerStarted","Data":"7a8cd19f7eaa656182d10e4d44e0db1c76f48984ea806ac2e6d8aa14cc03bb01"} Nov 30 06:58:53 crc kubenswrapper[4941]: I1130 06:58:53.542972 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" event={"ID":"1d0ef5c7-2b43-4e45-979d-de23cce56371","Type":"ContainerStarted","Data":"55ff1625e9ae0dd38b5d07cf8c99f9b1b9deed03e3c3ebf7f6f2d244699c4ff5"} Nov 30 06:58:53 crc kubenswrapper[4941]: I1130 06:58:53.561022 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5b666dd7f6-2wdd4" podStartSLOduration=2.5610008669999997 podStartE2EDuration="2.561000867s" podCreationTimestamp="2025-11-30 06:58:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 06:58:53.558733726 +0000 UTC m=+754.326905345" watchObservedRunningTime="2025-11-30 06:58:53.561000867 +0000 UTC m=+754.329172476" Nov 30 06:58:55 crc kubenswrapper[4941]: I1130 06:58:55.557487 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" event={"ID":"1d0ef5c7-2b43-4e45-979d-de23cce56371","Type":"ContainerStarted","Data":"0e3d31a92be926ee9acfb61189f6f8f5c572d0d1ffb807fcc8615c3c7873a35b"} Nov 30 06:58:55 crc kubenswrapper[4941]: I1130 06:58:55.557911 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" Nov 30 06:58:55 crc kubenswrapper[4941]: I1130 06:58:55.560435 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-vnjxd" event={"ID":"d958a2dd-c486-4357-8ac9-a808e3474e65","Type":"ContainerStarted","Data":"db0ec3ee71199bc338a4c64463e66532c2fc75b6c55790a06073cb87161676f7"} Nov 30 06:58:55 crc kubenswrapper[4941]: I1130 06:58:55.562002 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-5t7sr" event={"ID":"be9045de-8bc2-4342-bb17-345c07c16d74","Type":"ContainerStarted","Data":"981ed0ea2799e7dfdb6baf48f9ea2630f6eb49765f0742138826f0bbea996a8d"} Nov 30 06:58:55 crc kubenswrapper[4941]: I1130 06:58:55.562131 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:58:55 crc kubenswrapper[4941]: I1130 06:58:55.563605 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" event={"ID":"5b760607-de49-4963-b72b-5bc4ff0f41b7","Type":"ContainerStarted","Data":"b8f63159da2234f83e06fafbf86a692648828af28430722792cb5a11fb4a0ce9"} Nov 30 06:58:55 crc kubenswrapper[4941]: I1130 06:58:55.576557 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" podStartSLOduration=2.166496812 podStartE2EDuration="4.576536912s" podCreationTimestamp="2025-11-30 06:58:51 +0000 UTC" firstStartedPulling="2025-11-30 06:58:52.761732805 +0000 UTC m=+753.529904434" 
lastFinishedPulling="2025-11-30 06:58:55.171772925 +0000 UTC m=+755.939944534" observedRunningTime="2025-11-30 06:58:55.57361765 +0000 UTC m=+756.341789289" watchObservedRunningTime="2025-11-30 06:58:55.576536912 +0000 UTC m=+756.344708551" Nov 30 06:58:55 crc kubenswrapper[4941]: I1130 06:58:55.595737 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-5t7sr" podStartSLOduration=1.44562123 podStartE2EDuration="4.595712513s" podCreationTimestamp="2025-11-30 06:58:51 +0000 UTC" firstStartedPulling="2025-11-30 06:58:52.024598504 +0000 UTC m=+752.792770113" lastFinishedPulling="2025-11-30 06:58:55.174689787 +0000 UTC m=+755.942861396" observedRunningTime="2025-11-30 06:58:55.59050321 +0000 UTC m=+756.358674849" watchObservedRunningTime="2025-11-30 06:58:55.595712513 +0000 UTC m=+756.363884132" Nov 30 06:58:55 crc kubenswrapper[4941]: I1130 06:58:55.611559 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-f7frt" podStartSLOduration=1.7093057900000002 podStartE2EDuration="4.61153427s" podCreationTimestamp="2025-11-30 06:58:51 +0000 UTC" firstStartedPulling="2025-11-30 06:58:52.270520235 +0000 UTC m=+753.038691844" lastFinishedPulling="2025-11-30 06:58:55.172748675 +0000 UTC m=+755.940920324" observedRunningTime="2025-11-30 06:58:55.605937054 +0000 UTC m=+756.374108703" watchObservedRunningTime="2025-11-30 06:58:55.61153427 +0000 UTC m=+756.379705899" Nov 30 06:58:57 crc kubenswrapper[4941]: I1130 06:58:57.580776 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-vnjxd" event={"ID":"d958a2dd-c486-4357-8ac9-a808e3474e65","Type":"ContainerStarted","Data":"0dbf19ecd6ffc468885f6a57b5f6582b70b1fa6839b106e1fce438a9ce680e88"} Nov 30 06:58:57 crc kubenswrapper[4941]: I1130 06:58:57.599799 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-vnjxd" podStartSLOduration=1.342870987 podStartE2EDuration="6.599778848s" podCreationTimestamp="2025-11-30 06:58:51 +0000 UTC" firstStartedPulling="2025-11-30 06:58:52.177640229 +0000 UTC m=+752.945811838" lastFinishedPulling="2025-11-30 06:58:57.43454809 +0000 UTC m=+758.202719699" observedRunningTime="2025-11-30 06:58:57.595733531 +0000 UTC m=+758.363905150" watchObservedRunningTime="2025-11-30 06:58:57.599778848 +0000 UTC m=+758.367950457" Nov 30 06:59:01 crc kubenswrapper[4941]: I1130 06:59:01.140379 4941 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 30 06:59:02 crc kubenswrapper[4941]: I1130 06:59:02.032221 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-5t7sr" Nov 30 06:59:02 crc kubenswrapper[4941]: I1130 06:59:02.318866 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:59:02 crc kubenswrapper[4941]: I1130 06:59:02.318918 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:59:02 crc kubenswrapper[4941]: I1130 06:59:02.329132 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:59:02 crc kubenswrapper[4941]: I1130 06:59:02.613387 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-console/console-5b666dd7f6-2wdd4" Nov 30 06:59:02 crc kubenswrapper[4941]: I1130 06:59:02.662753 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-css84"] Nov 30 06:59:11 crc kubenswrapper[4941]: I1130 06:59:11.689188 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q7c9g"] Nov 30 06:59:11 crc kubenswrapper[4941]: I1130 06:59:11.692872 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:11 crc kubenswrapper[4941]: I1130 06:59:11.710947 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q7c9g"] Nov 30 06:59:11 crc kubenswrapper[4941]: I1130 06:59:11.874688 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bff84\" (UniqueName: \"kubernetes.io/projected/8a521c2f-bb16-4ed8-a034-f638ad841c1d-kube-api-access-bff84\") pod \"redhat-marketplace-q7c9g\" (UID: \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\") " pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:11 crc kubenswrapper[4941]: I1130 06:59:11.874875 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a521c2f-bb16-4ed8-a034-f638ad841c1d-catalog-content\") pod \"redhat-marketplace-q7c9g\" (UID: \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\") " pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:11 crc kubenswrapper[4941]: I1130 06:59:11.875186 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a521c2f-bb16-4ed8-a034-f638ad841c1d-utilities\") pod \"redhat-marketplace-q7c9g\" (UID: \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\") " pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:11 crc kubenswrapper[4941]: I1130 06:59:11.976481 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bff84\" (UniqueName: \"kubernetes.io/projected/8a521c2f-bb16-4ed8-a034-f638ad841c1d-kube-api-access-bff84\") pod \"redhat-marketplace-q7c9g\" (UID: \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\") " pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:11 crc kubenswrapper[4941]: I1130 06:59:11.976549 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a521c2f-bb16-4ed8-a034-f638ad841c1d-catalog-content\") pod \"redhat-marketplace-q7c9g\" (UID: \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\") " pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:11 crc kubenswrapper[4941]: I1130 06:59:11.976609 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a521c2f-bb16-4ed8-a034-f638ad841c1d-utilities\") pod \"redhat-marketplace-q7c9g\" (UID: \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\") " pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:11 crc kubenswrapper[4941]: I1130 06:59:11.977234 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a521c2f-bb16-4ed8-a034-f638ad841c1d-utilities\") pod \"redhat-marketplace-q7c9g\" (UID: \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\") " pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 
30 06:59:11 crc kubenswrapper[4941]: I1130 06:59:11.977297 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a521c2f-bb16-4ed8-a034-f638ad841c1d-catalog-content\") pod \"redhat-marketplace-q7c9g\" (UID: \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\") " pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:12 crc kubenswrapper[4941]: I1130 06:59:12.001256 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bff84\" (UniqueName: \"kubernetes.io/projected/8a521c2f-bb16-4ed8-a034-f638ad841c1d-kube-api-access-bff84\") pod \"redhat-marketplace-q7c9g\" (UID: \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\") " pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:12 crc kubenswrapper[4941]: I1130 06:59:12.029225 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:12 crc kubenswrapper[4941]: I1130 06:59:12.241255 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q7c9g"] Nov 30 06:59:12 crc kubenswrapper[4941]: I1130 06:59:12.565157 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-w527h" Nov 30 06:59:12 crc kubenswrapper[4941]: I1130 06:59:12.676578 4941 generic.go:334] "Generic (PLEG): container finished" podID="8a521c2f-bb16-4ed8-a034-f638ad841c1d" containerID="b5b653270dfc09add73b0b1008d6283006fc498d9cb7e12e774f08352cd92f14" exitCode=0 Nov 30 06:59:12 crc kubenswrapper[4941]: I1130 06:59:12.676631 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7c9g" event={"ID":"8a521c2f-bb16-4ed8-a034-f638ad841c1d","Type":"ContainerDied","Data":"b5b653270dfc09add73b0b1008d6283006fc498d9cb7e12e774f08352cd92f14"} Nov 30 06:59:12 crc kubenswrapper[4941]: I1130 06:59:12.676660 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7c9g" event={"ID":"8a521c2f-bb16-4ed8-a034-f638ad841c1d","Type":"ContainerStarted","Data":"3ae325a7324d4ee38895a27f9f6afad7873b8e6cb44798d5b268ba3a26113c30"} Nov 30 06:59:13 crc kubenswrapper[4941]: I1130 06:59:13.687096 4941 generic.go:334] "Generic (PLEG): container finished" podID="8a521c2f-bb16-4ed8-a034-f638ad841c1d" containerID="1355c93f66b82f0322fc6c2ffcd1c9a818af86ef25199fd5a3b1310964313c39" exitCode=0 Nov 30 06:59:13 crc kubenswrapper[4941]: I1130 06:59:13.687160 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7c9g" event={"ID":"8a521c2f-bb16-4ed8-a034-f638ad841c1d","Type":"ContainerDied","Data":"1355c93f66b82f0322fc6c2ffcd1c9a818af86ef25199fd5a3b1310964313c39"} Nov 30 06:59:14 crc kubenswrapper[4941]: I1130 06:59:14.697541 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7c9g" event={"ID":"8a521c2f-bb16-4ed8-a034-f638ad841c1d","Type":"ContainerStarted","Data":"6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f"} Nov 30 06:59:14 crc kubenswrapper[4941]: I1130 06:59:14.714968 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q7c9g" podStartSLOduration=2.161153873 podStartE2EDuration="3.714947842s" podCreationTimestamp="2025-11-30 06:59:11 +0000 UTC" firstStartedPulling="2025-11-30 06:59:12.678291544 +0000 UTC m=+773.446463153" 
lastFinishedPulling="2025-11-30 06:59:14.232085503 +0000 UTC m=+775.000257122" observedRunningTime="2025-11-30 06:59:14.71396135 +0000 UTC m=+775.482132959" watchObservedRunningTime="2025-11-30 06:59:14.714947842 +0000 UTC m=+775.483119461" Nov 30 06:59:22 crc kubenswrapper[4941]: I1130 06:59:22.030163 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:22 crc kubenswrapper[4941]: I1130 06:59:22.030996 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:22 crc kubenswrapper[4941]: I1130 06:59:22.084782 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:22 crc kubenswrapper[4941]: I1130 06:59:22.779937 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:22 crc kubenswrapper[4941]: I1130 06:59:22.816831 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q7c9g"] Nov 30 06:59:24 crc kubenswrapper[4941]: I1130 06:59:24.752156 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-q7c9g" podUID="8a521c2f-bb16-4ed8-a034-f638ad841c1d" containerName="registry-server" containerID="cri-o://6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f" gracePeriod=2 Nov 30 06:59:24 crc kubenswrapper[4941]: I1130 06:59:24.961835 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x"] Nov 30 06:59:24 crc kubenswrapper[4941]: I1130 06:59:24.964418 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:24 crc kubenswrapper[4941]: I1130 06:59:24.967943 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 30 06:59:24 crc kubenswrapper[4941]: I1130 06:59:24.975249 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x"] Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.148825 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9044019d-a819-45e8-85ff-263b655f3af6-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x\" (UID: \"9044019d-a819-45e8-85ff-263b655f3af6\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.148901 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9gjj\" (UniqueName: \"kubernetes.io/projected/9044019d-a819-45e8-85ff-263b655f3af6-kube-api-access-w9gjj\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x\" (UID: \"9044019d-a819-45e8-85ff-263b655f3af6\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.149062 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9044019d-a819-45e8-85ff-263b655f3af6-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x\" (UID: \"9044019d-a819-45e8-85ff-263b655f3af6\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.249971 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9gjj\" (UniqueName: \"kubernetes.io/projected/9044019d-a819-45e8-85ff-263b655f3af6-kube-api-access-w9gjj\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x\" (UID: \"9044019d-a819-45e8-85ff-263b655f3af6\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.250096 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9044019d-a819-45e8-85ff-263b655f3af6-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x\" (UID: \"9044019d-a819-45e8-85ff-263b655f3af6\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.250199 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9044019d-a819-45e8-85ff-263b655f3af6-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x\" (UID: \"9044019d-a819-45e8-85ff-263b655f3af6\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.250782 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/9044019d-a819-45e8-85ff-263b655f3af6-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x\" (UID: \"9044019d-a819-45e8-85ff-263b655f3af6\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.250817 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9044019d-a819-45e8-85ff-263b655f3af6-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x\" (UID: \"9044019d-a819-45e8-85ff-263b655f3af6\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.278243 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9gjj\" (UniqueName: \"kubernetes.io/projected/9044019d-a819-45e8-85ff-263b655f3af6-kube-api-access-w9gjj\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x\" (UID: \"9044019d-a819-45e8-85ff-263b655f3af6\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.330796 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.598386 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.719289 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x"] Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.757214 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a521c2f-bb16-4ed8-a034-f638ad841c1d-utilities\") pod \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\" (UID: \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\") " Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.757290 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bff84\" (UniqueName: \"kubernetes.io/projected/8a521c2f-bb16-4ed8-a034-f638ad841c1d-kube-api-access-bff84\") pod \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\" (UID: \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\") " Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.757405 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a521c2f-bb16-4ed8-a034-f638ad841c1d-catalog-content\") pod \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\" (UID: \"8a521c2f-bb16-4ed8-a034-f638ad841c1d\") " Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.759058 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a521c2f-bb16-4ed8-a034-f638ad841c1d-utilities" (OuterVolumeSpecName: "utilities") pod "8a521c2f-bb16-4ed8-a034-f638ad841c1d" (UID: "8a521c2f-bb16-4ed8-a034-f638ad841c1d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.760175 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" event={"ID":"9044019d-a819-45e8-85ff-263b655f3af6","Type":"ContainerStarted","Data":"47b218ca853ef8f6b292d360b7a5a5e5c8de5c3f66f88401f6ddf8b2c545bace"} Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.762108 4941 generic.go:334] "Generic (PLEG): container finished" podID="8a521c2f-bb16-4ed8-a034-f638ad841c1d" containerID="6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f" exitCode=0 Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.762131 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7c9g" event={"ID":"8a521c2f-bb16-4ed8-a034-f638ad841c1d","Type":"ContainerDied","Data":"6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f"} Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.762146 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q7c9g" event={"ID":"8a521c2f-bb16-4ed8-a034-f638ad841c1d","Type":"ContainerDied","Data":"3ae325a7324d4ee38895a27f9f6afad7873b8e6cb44798d5b268ba3a26113c30"} Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.762161 4941 scope.go:117] "RemoveContainer" containerID="6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.762159 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a521c2f-bb16-4ed8-a034-f638ad841c1d-kube-api-access-bff84" (OuterVolumeSpecName: "kube-api-access-bff84") pod "8a521c2f-bb16-4ed8-a034-f638ad841c1d" (UID: "8a521c2f-bb16-4ed8-a034-f638ad841c1d"). InnerVolumeSpecName "kube-api-access-bff84". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.762261 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q7c9g" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.774144 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a521c2f-bb16-4ed8-a034-f638ad841c1d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8a521c2f-bb16-4ed8-a034-f638ad841c1d" (UID: "8a521c2f-bb16-4ed8-a034-f638ad841c1d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.779867 4941 scope.go:117] "RemoveContainer" containerID="1355c93f66b82f0322fc6c2ffcd1c9a818af86ef25199fd5a3b1310964313c39" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.805879 4941 scope.go:117] "RemoveContainer" containerID="b5b653270dfc09add73b0b1008d6283006fc498d9cb7e12e774f08352cd92f14" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.819875 4941 scope.go:117] "RemoveContainer" containerID="6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f" Nov 30 06:59:25 crc kubenswrapper[4941]: E1130 06:59:25.821223 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f\": container with ID starting with 6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f not found: ID does not exist" containerID="6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.821395 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f"} err="failed to get container status \"6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f\": rpc error: code = NotFound desc = could not find container \"6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f\": container with ID starting with 6fb87f207e06567fbbde87cfa2d66160ea6b30a65321597e658d4ba4a57b243f not found: ID does not exist" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.821477 4941 scope.go:117] "RemoveContainer" containerID="1355c93f66b82f0322fc6c2ffcd1c9a818af86ef25199fd5a3b1310964313c39" Nov 30 06:59:25 crc kubenswrapper[4941]: E1130 06:59:25.821925 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1355c93f66b82f0322fc6c2ffcd1c9a818af86ef25199fd5a3b1310964313c39\": container with ID starting with 1355c93f66b82f0322fc6c2ffcd1c9a818af86ef25199fd5a3b1310964313c39 not found: ID does not exist" containerID="1355c93f66b82f0322fc6c2ffcd1c9a818af86ef25199fd5a3b1310964313c39" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.821990 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1355c93f66b82f0322fc6c2ffcd1c9a818af86ef25199fd5a3b1310964313c39"} err="failed to get container status \"1355c93f66b82f0322fc6c2ffcd1c9a818af86ef25199fd5a3b1310964313c39\": rpc error: code = NotFound desc = could not find container \"1355c93f66b82f0322fc6c2ffcd1c9a818af86ef25199fd5a3b1310964313c39\": container with ID starting with 1355c93f66b82f0322fc6c2ffcd1c9a818af86ef25199fd5a3b1310964313c39 not found: ID does not exist" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.822023 4941 scope.go:117] "RemoveContainer" containerID="b5b653270dfc09add73b0b1008d6283006fc498d9cb7e12e774f08352cd92f14" Nov 30 06:59:25 crc kubenswrapper[4941]: E1130 06:59:25.822296 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5b653270dfc09add73b0b1008d6283006fc498d9cb7e12e774f08352cd92f14\": container with ID starting with b5b653270dfc09add73b0b1008d6283006fc498d9cb7e12e774f08352cd92f14 not found: ID does not exist" containerID="b5b653270dfc09add73b0b1008d6283006fc498d9cb7e12e774f08352cd92f14" Nov 30 06:59:25 crc 
kubenswrapper[4941]: I1130 06:59:25.822324 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5b653270dfc09add73b0b1008d6283006fc498d9cb7e12e774f08352cd92f14"} err="failed to get container status \"b5b653270dfc09add73b0b1008d6283006fc498d9cb7e12e774f08352cd92f14\": rpc error: code = NotFound desc = could not find container \"b5b653270dfc09add73b0b1008d6283006fc498d9cb7e12e774f08352cd92f14\": container with ID starting with b5b653270dfc09add73b0b1008d6283006fc498d9cb7e12e774f08352cd92f14 not found: ID does not exist" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.858564 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bff84\" (UniqueName: \"kubernetes.io/projected/8a521c2f-bb16-4ed8-a034-f638ad841c1d-kube-api-access-bff84\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.858600 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a521c2f-bb16-4ed8-a034-f638ad841c1d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:25 crc kubenswrapper[4941]: I1130 06:59:25.858611 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a521c2f-bb16-4ed8-a034-f638ad841c1d-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:26 crc kubenswrapper[4941]: I1130 06:59:26.102371 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q7c9g"] Nov 30 06:59:26 crc kubenswrapper[4941]: I1130 06:59:26.105558 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-q7c9g"] Nov 30 06:59:26 crc kubenswrapper[4941]: I1130 06:59:26.768380 4941 generic.go:334] "Generic (PLEG): container finished" podID="9044019d-a819-45e8-85ff-263b655f3af6" containerID="7585240f528d99a8e9f276afd59f7bfa7e1c3808e23be7b72466d2d0b6ef82c4" exitCode=0 Nov 30 06:59:26 crc kubenswrapper[4941]: I1130 06:59:26.768455 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" event={"ID":"9044019d-a819-45e8-85ff-263b655f3af6","Type":"ContainerDied","Data":"7585240f528d99a8e9f276afd59f7bfa7e1c3808e23be7b72466d2d0b6ef82c4"} Nov 30 06:59:27 crc kubenswrapper[4941]: I1130 06:59:27.529963 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a521c2f-bb16-4ed8-a034-f638ad841c1d" path="/var/lib/kubelet/pods/8a521c2f-bb16-4ed8-a034-f638ad841c1d/volumes" Nov 30 06:59:27 crc kubenswrapper[4941]: I1130 06:59:27.704193 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-css84" podUID="df9aa967-eec9-4ce5-9c64-edff3aedca4a" containerName="console" containerID="cri-o://7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed" gracePeriod=15 Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.033742 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-css84_df9aa967-eec9-4ce5-9c64-edff3aedca4a/console/0.log" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.033817 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-css84" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.085041 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-serving-cert\") pod \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.085104 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-oauth-config\") pod \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.085135 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-trusted-ca-bundle\") pod \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.085162 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-service-ca\") pod \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.085194 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-oauth-serving-cert\") pod \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.085219 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-config\") pod \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.085260 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7vtg\" (UniqueName: \"kubernetes.io/projected/df9aa967-eec9-4ce5-9c64-edff3aedca4a-kube-api-access-c7vtg\") pod \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\" (UID: \"df9aa967-eec9-4ce5-9c64-edff3aedca4a\") " Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.086152 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "df9aa967-eec9-4ce5-9c64-edff3aedca4a" (UID: "df9aa967-eec9-4ce5-9c64-edff3aedca4a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.086177 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "df9aa967-eec9-4ce5-9c64-edff3aedca4a" (UID: "df9aa967-eec9-4ce5-9c64-edff3aedca4a"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.086197 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-service-ca" (OuterVolumeSpecName: "service-ca") pod "df9aa967-eec9-4ce5-9c64-edff3aedca4a" (UID: "df9aa967-eec9-4ce5-9c64-edff3aedca4a"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.086255 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-config" (OuterVolumeSpecName: "console-config") pod "df9aa967-eec9-4ce5-9c64-edff3aedca4a" (UID: "df9aa967-eec9-4ce5-9c64-edff3aedca4a"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.091300 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df9aa967-eec9-4ce5-9c64-edff3aedca4a-kube-api-access-c7vtg" (OuterVolumeSpecName: "kube-api-access-c7vtg") pod "df9aa967-eec9-4ce5-9c64-edff3aedca4a" (UID: "df9aa967-eec9-4ce5-9c64-edff3aedca4a"). InnerVolumeSpecName "kube-api-access-c7vtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.091368 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "df9aa967-eec9-4ce5-9c64-edff3aedca4a" (UID: "df9aa967-eec9-4ce5-9c64-edff3aedca4a"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.091489 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "df9aa967-eec9-4ce5-9c64-edff3aedca4a" (UID: "df9aa967-eec9-4ce5-9c64-edff3aedca4a"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.135464 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-n5jmg"] Nov 30 06:59:28 crc kubenswrapper[4941]: E1130 06:59:28.138404 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a521c2f-bb16-4ed8-a034-f638ad841c1d" containerName="registry-server" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.138438 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a521c2f-bb16-4ed8-a034-f638ad841c1d" containerName="registry-server" Nov 30 06:59:28 crc kubenswrapper[4941]: E1130 06:59:28.138454 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df9aa967-eec9-4ce5-9c64-edff3aedca4a" containerName="console" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.138462 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="df9aa967-eec9-4ce5-9c64-edff3aedca4a" containerName="console" Nov 30 06:59:28 crc kubenswrapper[4941]: E1130 06:59:28.138472 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a521c2f-bb16-4ed8-a034-f638ad841c1d" containerName="extract-utilities" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.138721 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a521c2f-bb16-4ed8-a034-f638ad841c1d" containerName="extract-utilities" Nov 30 06:59:28 crc kubenswrapper[4941]: E1130 06:59:28.138756 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a521c2f-bb16-4ed8-a034-f638ad841c1d" containerName="extract-content" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.138766 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a521c2f-bb16-4ed8-a034-f638ad841c1d" containerName="extract-content" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.140739 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a521c2f-bb16-4ed8-a034-f638ad841c1d" containerName="registry-server" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.141128 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="df9aa967-eec9-4ce5-9c64-edff3aedca4a" containerName="console" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.141995 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n5jmg"] Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.142138 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.186504 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7xpd\" (UniqueName: \"kubernetes.io/projected/68192767-bd6a-4ea9-a040-902042ad77b2-kube-api-access-b7xpd\") pod \"redhat-operators-n5jmg\" (UID: \"68192767-bd6a-4ea9-a040-902042ad77b2\") " pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.186637 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68192767-bd6a-4ea9-a040-902042ad77b2-utilities\") pod \"redhat-operators-n5jmg\" (UID: \"68192767-bd6a-4ea9-a040-902042ad77b2\") " pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.186714 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68192767-bd6a-4ea9-a040-902042ad77b2-catalog-content\") pod \"redhat-operators-n5jmg\" (UID: \"68192767-bd6a-4ea9-a040-902042ad77b2\") " pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.186853 4941 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.186877 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7vtg\" (UniqueName: \"kubernetes.io/projected/df9aa967-eec9-4ce5-9c64-edff3aedca4a-kube-api-access-c7vtg\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.186890 4941 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.186904 4941 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/df9aa967-eec9-4ce5-9c64-edff3aedca4a-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.186916 4941 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.186925 4941 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-service-ca\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.186935 4941 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/df9aa967-eec9-4ce5-9c64-edff3aedca4a-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.287526 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7xpd\" (UniqueName: \"kubernetes.io/projected/68192767-bd6a-4ea9-a040-902042ad77b2-kube-api-access-b7xpd\") pod \"redhat-operators-n5jmg\" (UID: 
\"68192767-bd6a-4ea9-a040-902042ad77b2\") " pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.287804 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68192767-bd6a-4ea9-a040-902042ad77b2-utilities\") pod \"redhat-operators-n5jmg\" (UID: \"68192767-bd6a-4ea9-a040-902042ad77b2\") " pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.287886 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68192767-bd6a-4ea9-a040-902042ad77b2-catalog-content\") pod \"redhat-operators-n5jmg\" (UID: \"68192767-bd6a-4ea9-a040-902042ad77b2\") " pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.288257 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68192767-bd6a-4ea9-a040-902042ad77b2-utilities\") pod \"redhat-operators-n5jmg\" (UID: \"68192767-bd6a-4ea9-a040-902042ad77b2\") " pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.288266 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68192767-bd6a-4ea9-a040-902042ad77b2-catalog-content\") pod \"redhat-operators-n5jmg\" (UID: \"68192767-bd6a-4ea9-a040-902042ad77b2\") " pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.311327 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7xpd\" (UniqueName: \"kubernetes.io/projected/68192767-bd6a-4ea9-a040-902042ad77b2-kube-api-access-b7xpd\") pod \"redhat-operators-n5jmg\" (UID: \"68192767-bd6a-4ea9-a040-902042ad77b2\") " pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.473424 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.790578 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-css84_df9aa967-eec9-4ce5-9c64-edff3aedca4a/console/0.log" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.791021 4941 generic.go:334] "Generic (PLEG): container finished" podID="df9aa967-eec9-4ce5-9c64-edff3aedca4a" containerID="7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed" exitCode=2 Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.791114 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-css84" event={"ID":"df9aa967-eec9-4ce5-9c64-edff3aedca4a","Type":"ContainerDied","Data":"7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed"} Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.791114 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-css84" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.791145 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-css84" event={"ID":"df9aa967-eec9-4ce5-9c64-edff3aedca4a","Type":"ContainerDied","Data":"62503a9f9529cef2a6a5b2e490eaa9f2984b565209e1c0cecd8e6c48bda45d36"} Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.791165 4941 scope.go:117] "RemoveContainer" containerID="7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.796116 4941 generic.go:334] "Generic (PLEG): container finished" podID="9044019d-a819-45e8-85ff-263b655f3af6" containerID="0dd2d385ad1e644a1938bd9f1faa3e704647d754efb0b79523fcf970afa6e688" exitCode=0 Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.796150 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" event={"ID":"9044019d-a819-45e8-85ff-263b655f3af6","Type":"ContainerDied","Data":"0dd2d385ad1e644a1938bd9f1faa3e704647d754efb0b79523fcf970afa6e688"} Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.844656 4941 scope.go:117] "RemoveContainer" containerID="7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed" Nov 30 06:59:28 crc kubenswrapper[4941]: E1130 06:59:28.847266 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed\": container with ID starting with 7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed not found: ID does not exist" containerID="7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.847305 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed"} err="failed to get container status \"7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed\": rpc error: code = NotFound desc = could not find container \"7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed\": container with ID starting with 7686337fe5398cde5db1067826ef3470826f7d28853d3b057dcf4ad336355fed not found: ID does not exist" Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.890451 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-css84"] Nov 30 06:59:28 crc kubenswrapper[4941]: I1130 06:59:28.895143 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-css84"] Nov 30 06:59:29 crc kubenswrapper[4941]: I1130 06:59:29.021379 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n5jmg"] Nov 30 06:59:29 crc kubenswrapper[4941]: W1130 06:59:29.025592 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68192767_bd6a_4ea9_a040_902042ad77b2.slice/crio-acd8a61dd964e8810691e9b45537a5b9343e7305844594a39486449bf95f6b6c WatchSource:0}: Error finding container acd8a61dd964e8810691e9b45537a5b9343e7305844594a39486449bf95f6b6c: Status 404 returned error can't find the container with id acd8a61dd964e8810691e9b45537a5b9343e7305844594a39486449bf95f6b6c Nov 30 06:59:29 crc kubenswrapper[4941]: I1130 06:59:29.528213 4941 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="df9aa967-eec9-4ce5-9c64-edff3aedca4a" path="/var/lib/kubelet/pods/df9aa967-eec9-4ce5-9c64-edff3aedca4a/volumes" Nov 30 06:59:29 crc kubenswrapper[4941]: I1130 06:59:29.803490 4941 generic.go:334] "Generic (PLEG): container finished" podID="9044019d-a819-45e8-85ff-263b655f3af6" containerID="629bb9b4ebe0fb8caf4b400c04b92e1b79eaf51e6cadf1589e8ead8c90a97e8c" exitCode=0 Nov 30 06:59:29 crc kubenswrapper[4941]: I1130 06:59:29.803576 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" event={"ID":"9044019d-a819-45e8-85ff-263b655f3af6","Type":"ContainerDied","Data":"629bb9b4ebe0fb8caf4b400c04b92e1b79eaf51e6cadf1589e8ead8c90a97e8c"} Nov 30 06:59:29 crc kubenswrapper[4941]: I1130 06:59:29.804962 4941 generic.go:334] "Generic (PLEG): container finished" podID="68192767-bd6a-4ea9-a040-902042ad77b2" containerID="c2ca02854e46098a5b430797ecb7d1c781c1b12275988cc0c5059dc63822d49f" exitCode=0 Nov 30 06:59:29 crc kubenswrapper[4941]: I1130 06:59:29.804995 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n5jmg" event={"ID":"68192767-bd6a-4ea9-a040-902042ad77b2","Type":"ContainerDied","Data":"c2ca02854e46098a5b430797ecb7d1c781c1b12275988cc0c5059dc63822d49f"} Nov 30 06:59:29 crc kubenswrapper[4941]: I1130 06:59:29.805012 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n5jmg" event={"ID":"68192767-bd6a-4ea9-a040-902042ad77b2","Type":"ContainerStarted","Data":"acd8a61dd964e8810691e9b45537a5b9343e7305844594a39486449bf95f6b6c"} Nov 30 06:59:30 crc kubenswrapper[4941]: I1130 06:59:30.812572 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n5jmg" event={"ID":"68192767-bd6a-4ea9-a040-902042ad77b2","Type":"ContainerStarted","Data":"6335ac6b31978e2d2dc814d5c84c0c2458786ba453c128b5d80982cddc68c042"} Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.037409 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.040281 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9044019d-a819-45e8-85ff-263b655f3af6-bundle\") pod \"9044019d-a819-45e8-85ff-263b655f3af6\" (UID: \"9044019d-a819-45e8-85ff-263b655f3af6\") " Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.040348 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9044019d-a819-45e8-85ff-263b655f3af6-util\") pod \"9044019d-a819-45e8-85ff-263b655f3af6\" (UID: \"9044019d-a819-45e8-85ff-263b655f3af6\") " Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.040374 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9gjj\" (UniqueName: \"kubernetes.io/projected/9044019d-a819-45e8-85ff-263b655f3af6-kube-api-access-w9gjj\") pod \"9044019d-a819-45e8-85ff-263b655f3af6\" (UID: \"9044019d-a819-45e8-85ff-263b655f3af6\") " Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.041756 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9044019d-a819-45e8-85ff-263b655f3af6-bundle" (OuterVolumeSpecName: "bundle") pod "9044019d-a819-45e8-85ff-263b655f3af6" (UID: "9044019d-a819-45e8-85ff-263b655f3af6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.048760 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9044019d-a819-45e8-85ff-263b655f3af6-kube-api-access-w9gjj" (OuterVolumeSpecName: "kube-api-access-w9gjj") pod "9044019d-a819-45e8-85ff-263b655f3af6" (UID: "9044019d-a819-45e8-85ff-263b655f3af6"). InnerVolumeSpecName "kube-api-access-w9gjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.141698 4941 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9044019d-a819-45e8-85ff-263b655f3af6-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.141731 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9gjj\" (UniqueName: \"kubernetes.io/projected/9044019d-a819-45e8-85ff-263b655f3af6-kube-api-access-w9gjj\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.379182 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9044019d-a819-45e8-85ff-263b655f3af6-util" (OuterVolumeSpecName: "util") pod "9044019d-a819-45e8-85ff-263b655f3af6" (UID: "9044019d-a819-45e8-85ff-263b655f3af6"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.445913 4941 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9044019d-a819-45e8-85ff-263b655f3af6-util\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.820441 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.820449 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x" event={"ID":"9044019d-a819-45e8-85ff-263b655f3af6","Type":"ContainerDied","Data":"47b218ca853ef8f6b292d360b7a5a5e5c8de5c3f66f88401f6ddf8b2c545bace"} Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.821682 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47b218ca853ef8f6b292d360b7a5a5e5c8de5c3f66f88401f6ddf8b2c545bace" Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.824865 4941 generic.go:334] "Generic (PLEG): container finished" podID="68192767-bd6a-4ea9-a040-902042ad77b2" containerID="6335ac6b31978e2d2dc814d5c84c0c2458786ba453c128b5d80982cddc68c042" exitCode=0 Nov 30 06:59:31 crc kubenswrapper[4941]: I1130 06:59:31.824919 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n5jmg" event={"ID":"68192767-bd6a-4ea9-a040-902042ad77b2","Type":"ContainerDied","Data":"6335ac6b31978e2d2dc814d5c84c0c2458786ba453c128b5d80982cddc68c042"} Nov 30 06:59:32 crc kubenswrapper[4941]: I1130 06:59:32.833288 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n5jmg" event={"ID":"68192767-bd6a-4ea9-a040-902042ad77b2","Type":"ContainerStarted","Data":"98f11276819242b37e2eaf35837fd6415b2448a4bf46105bd0008b5b6917d5c9"} Nov 30 06:59:32 crc kubenswrapper[4941]: I1130 06:59:32.858745 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n5jmg" podStartSLOduration=2.389708507 podStartE2EDuration="4.858726579s" podCreationTimestamp="2025-11-30 06:59:28 +0000 UTC" firstStartedPulling="2025-11-30 06:59:29.806060755 +0000 UTC m=+790.574232364" lastFinishedPulling="2025-11-30 06:59:32.275078827 +0000 UTC m=+793.043250436" observedRunningTime="2025-11-30 06:59:32.85734041 +0000 UTC m=+793.625512029" watchObservedRunningTime="2025-11-30 06:59:32.858726579 +0000 UTC m=+793.626898198" Nov 30 06:59:32 crc kubenswrapper[4941]: I1130 06:59:32.978865 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 06:59:32 crc kubenswrapper[4941]: I1130 06:59:32.978926 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 06:59:38 crc kubenswrapper[4941]: I1130 06:59:38.473598 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:38 crc kubenswrapper[4941]: I1130 06:59:38.475141 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:38 crc kubenswrapper[4941]: I1130 06:59:38.547061 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:38 crc 
kubenswrapper[4941]: I1130 06:59:38.934991 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:40 crc kubenswrapper[4941]: I1130 06:59:40.717418 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n5jmg"] Nov 30 06:59:41 crc kubenswrapper[4941]: I1130 06:59:41.902718 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n5jmg" podUID="68192767-bd6a-4ea9-a040-902042ad77b2" containerName="registry-server" containerID="cri-o://98f11276819242b37e2eaf35837fd6415b2448a4bf46105bd0008b5b6917d5c9" gracePeriod=2 Nov 30 06:59:42 crc kubenswrapper[4941]: I1130 06:59:42.914940 4941 generic.go:334] "Generic (PLEG): container finished" podID="68192767-bd6a-4ea9-a040-902042ad77b2" containerID="98f11276819242b37e2eaf35837fd6415b2448a4bf46105bd0008b5b6917d5c9" exitCode=0 Nov 30 06:59:42 crc kubenswrapper[4941]: I1130 06:59:42.914988 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n5jmg" event={"ID":"68192767-bd6a-4ea9-a040-902042ad77b2","Type":"ContainerDied","Data":"98f11276819242b37e2eaf35837fd6415b2448a4bf46105bd0008b5b6917d5c9"} Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.130888 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-759d497878-7pn4c"] Nov 30 06:59:43 crc kubenswrapper[4941]: E1130 06:59:43.131102 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9044019d-a819-45e8-85ff-263b655f3af6" containerName="util" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.131118 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="9044019d-a819-45e8-85ff-263b655f3af6" containerName="util" Nov 30 06:59:43 crc kubenswrapper[4941]: E1130 06:59:43.131133 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9044019d-a819-45e8-85ff-263b655f3af6" containerName="pull" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.131141 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="9044019d-a819-45e8-85ff-263b655f3af6" containerName="pull" Nov 30 06:59:43 crc kubenswrapper[4941]: E1130 06:59:43.131160 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9044019d-a819-45e8-85ff-263b655f3af6" containerName="extract" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.131167 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="9044019d-a819-45e8-85ff-263b655f3af6" containerName="extract" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.131251 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="9044019d-a819-45e8-85ff-263b655f3af6" containerName="extract" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.131652 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.135107 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.135201 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.135279 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-j5c7h" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.135312 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.135313 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.151683 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-759d497878-7pn4c"] Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.188716 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/21e69639-56df-4de5-8339-c864a9864237-webhook-cert\") pod \"metallb-operator-controller-manager-759d497878-7pn4c\" (UID: \"21e69639-56df-4de5-8339-c864a9864237\") " pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.188755 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx7b8\" (UniqueName: \"kubernetes.io/projected/21e69639-56df-4de5-8339-c864a9864237-kube-api-access-kx7b8\") pod \"metallb-operator-controller-manager-759d497878-7pn4c\" (UID: \"21e69639-56df-4de5-8339-c864a9864237\") " pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.188808 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/21e69639-56df-4de5-8339-c864a9864237-apiservice-cert\") pod \"metallb-operator-controller-manager-759d497878-7pn4c\" (UID: \"21e69639-56df-4de5-8339-c864a9864237\") " pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.292002 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/21e69639-56df-4de5-8339-c864a9864237-webhook-cert\") pod \"metallb-operator-controller-manager-759d497878-7pn4c\" (UID: \"21e69639-56df-4de5-8339-c864a9864237\") " pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.292048 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx7b8\" (UniqueName: \"kubernetes.io/projected/21e69639-56df-4de5-8339-c864a9864237-kube-api-access-kx7b8\") pod \"metallb-operator-controller-manager-759d497878-7pn4c\" (UID: \"21e69639-56df-4de5-8339-c864a9864237\") " pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.292099 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/21e69639-56df-4de5-8339-c864a9864237-apiservice-cert\") pod \"metallb-operator-controller-manager-759d497878-7pn4c\" (UID: \"21e69639-56df-4de5-8339-c864a9864237\") " pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.305721 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/21e69639-56df-4de5-8339-c864a9864237-webhook-cert\") pod \"metallb-operator-controller-manager-759d497878-7pn4c\" (UID: \"21e69639-56df-4de5-8339-c864a9864237\") " pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.306861 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/21e69639-56df-4de5-8339-c864a9864237-apiservice-cert\") pod \"metallb-operator-controller-manager-759d497878-7pn4c\" (UID: \"21e69639-56df-4de5-8339-c864a9864237\") " pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.322069 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx7b8\" (UniqueName: \"kubernetes.io/projected/21e69639-56df-4de5-8339-c864a9864237-kube-api-access-kx7b8\") pod \"metallb-operator-controller-manager-759d497878-7pn4c\" (UID: \"21e69639-56df-4de5-8339-c864a9864237\") " pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.431913 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.446061 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.448030 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj"] Nov 30 06:59:43 crc kubenswrapper[4941]: E1130 06:59:43.448241 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68192767-bd6a-4ea9-a040-902042ad77b2" containerName="extract-utilities" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.448258 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="68192767-bd6a-4ea9-a040-902042ad77b2" containerName="extract-utilities" Nov 30 06:59:43 crc kubenswrapper[4941]: E1130 06:59:43.448272 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68192767-bd6a-4ea9-a040-902042ad77b2" containerName="registry-server" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.448278 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="68192767-bd6a-4ea9-a040-902042ad77b2" containerName="registry-server" Nov 30 06:59:43 crc kubenswrapper[4941]: E1130 06:59:43.448292 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68192767-bd6a-4ea9-a040-902042ad77b2" containerName="extract-content" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.448298 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="68192767-bd6a-4ea9-a040-902042ad77b2" containerName="extract-content" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.448416 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="68192767-bd6a-4ea9-a040-902042ad77b2" containerName="registry-server" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.448786 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.450801 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.451091 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.451263 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-rvqx8" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.463683 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj"] Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.497210 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rdwh\" (UniqueName: \"kubernetes.io/projected/35bb24de-c515-4934-8de3-dce90b7d06ca-kube-api-access-7rdwh\") pod \"metallb-operator-webhook-server-55cd88cb6c-wlwwj\" (UID: \"35bb24de-c515-4934-8de3-dce90b7d06ca\") " pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.497259 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/35bb24de-c515-4934-8de3-dce90b7d06ca-webhook-cert\") pod \"metallb-operator-webhook-server-55cd88cb6c-wlwwj\" (UID: \"35bb24de-c515-4934-8de3-dce90b7d06ca\") " pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.497291 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/35bb24de-c515-4934-8de3-dce90b7d06ca-apiservice-cert\") pod \"metallb-operator-webhook-server-55cd88cb6c-wlwwj\" (UID: \"35bb24de-c515-4934-8de3-dce90b7d06ca\") " pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.598417 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68192767-bd6a-4ea9-a040-902042ad77b2-utilities\") pod \"68192767-bd6a-4ea9-a040-902042ad77b2\" (UID: \"68192767-bd6a-4ea9-a040-902042ad77b2\") " Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.598530 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7xpd\" (UniqueName: \"kubernetes.io/projected/68192767-bd6a-4ea9-a040-902042ad77b2-kube-api-access-b7xpd\") pod \"68192767-bd6a-4ea9-a040-902042ad77b2\" (UID: \"68192767-bd6a-4ea9-a040-902042ad77b2\") " Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.598551 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68192767-bd6a-4ea9-a040-902042ad77b2-catalog-content\") pod \"68192767-bd6a-4ea9-a040-902042ad77b2\" (UID: \"68192767-bd6a-4ea9-a040-902042ad77b2\") " Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.598645 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rdwh\" (UniqueName: \"kubernetes.io/projected/35bb24de-c515-4934-8de3-dce90b7d06ca-kube-api-access-7rdwh\") pod 
\"metallb-operator-webhook-server-55cd88cb6c-wlwwj\" (UID: \"35bb24de-c515-4934-8de3-dce90b7d06ca\") " pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.598670 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/35bb24de-c515-4934-8de3-dce90b7d06ca-webhook-cert\") pod \"metallb-operator-webhook-server-55cd88cb6c-wlwwj\" (UID: \"35bb24de-c515-4934-8de3-dce90b7d06ca\") " pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.598697 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/35bb24de-c515-4934-8de3-dce90b7d06ca-apiservice-cert\") pod \"metallb-operator-webhook-server-55cd88cb6c-wlwwj\" (UID: \"35bb24de-c515-4934-8de3-dce90b7d06ca\") " pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.599320 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68192767-bd6a-4ea9-a040-902042ad77b2-utilities" (OuterVolumeSpecName: "utilities") pod "68192767-bd6a-4ea9-a040-902042ad77b2" (UID: "68192767-bd6a-4ea9-a040-902042ad77b2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.602031 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/35bb24de-c515-4934-8de3-dce90b7d06ca-apiservice-cert\") pod \"metallb-operator-webhook-server-55cd88cb6c-wlwwj\" (UID: \"35bb24de-c515-4934-8de3-dce90b7d06ca\") " pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.603275 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68192767-bd6a-4ea9-a040-902042ad77b2-kube-api-access-b7xpd" (OuterVolumeSpecName: "kube-api-access-b7xpd") pod "68192767-bd6a-4ea9-a040-902042ad77b2" (UID: "68192767-bd6a-4ea9-a040-902042ad77b2"). InnerVolumeSpecName "kube-api-access-b7xpd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.603287 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/35bb24de-c515-4934-8de3-dce90b7d06ca-webhook-cert\") pod \"metallb-operator-webhook-server-55cd88cb6c-wlwwj\" (UID: \"35bb24de-c515-4934-8de3-dce90b7d06ca\") " pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.621269 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rdwh\" (UniqueName: \"kubernetes.io/projected/35bb24de-c515-4934-8de3-dce90b7d06ca-kube-api-access-7rdwh\") pod \"metallb-operator-webhook-server-55cd88cb6c-wlwwj\" (UID: \"35bb24de-c515-4934-8de3-dce90b7d06ca\") " pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.700355 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7xpd\" (UniqueName: \"kubernetes.io/projected/68192767-bd6a-4ea9-a040-902042ad77b2-kube-api-access-b7xpd\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.700389 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68192767-bd6a-4ea9-a040-902042ad77b2-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.713167 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-759d497878-7pn4c"] Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.713195 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68192767-bd6a-4ea9-a040-902042ad77b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "68192767-bd6a-4ea9-a040-902042ad77b2" (UID: "68192767-bd6a-4ea9-a040-902042ad77b2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 06:59:43 crc kubenswrapper[4941]: W1130 06:59:43.715651 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod21e69639_56df_4de5_8339_c864a9864237.slice/crio-5ff9d4b56afd59fd735445d5e45167105d94275a5389d88c34fdcf95a9c6e193 WatchSource:0}: Error finding container 5ff9d4b56afd59fd735445d5e45167105d94275a5389d88c34fdcf95a9c6e193: Status 404 returned error can't find the container with id 5ff9d4b56afd59fd735445d5e45167105d94275a5389d88c34fdcf95a9c6e193 Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.759725 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.802287 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68192767-bd6a-4ea9-a040-902042ad77b2-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.920969 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" event={"ID":"21e69639-56df-4de5-8339-c864a9864237","Type":"ContainerStarted","Data":"5ff9d4b56afd59fd735445d5e45167105d94275a5389d88c34fdcf95a9c6e193"} Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.922414 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n5jmg" event={"ID":"68192767-bd6a-4ea9-a040-902042ad77b2","Type":"ContainerDied","Data":"acd8a61dd964e8810691e9b45537a5b9343e7305844594a39486449bf95f6b6c"} Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.922447 4941 scope.go:117] "RemoveContainer" containerID="98f11276819242b37e2eaf35837fd6415b2448a4bf46105bd0008b5b6917d5c9" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.922559 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n5jmg" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.947967 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n5jmg"] Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.954090 4941 scope.go:117] "RemoveContainer" containerID="6335ac6b31978e2d2dc814d5c84c0c2458786ba453c128b5d80982cddc68c042" Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.957014 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n5jmg"] Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.969060 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj"] Nov 30 06:59:43 crc kubenswrapper[4941]: I1130 06:59:43.981961 4941 scope.go:117] "RemoveContainer" containerID="c2ca02854e46098a5b430797ecb7d1c781c1b12275988cc0c5059dc63822d49f" Nov 30 06:59:43 crc kubenswrapper[4941]: W1130 06:59:43.985142 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35bb24de_c515_4934_8de3_dce90b7d06ca.slice/crio-5f9e7f1e3da6b7dffa0a6af54155f83178eba4d5e935ac5c52313497c99a475d WatchSource:0}: Error finding container 5f9e7f1e3da6b7dffa0a6af54155f83178eba4d5e935ac5c52313497c99a475d: Status 404 returned error can't find the container with id 5f9e7f1e3da6b7dffa0a6af54155f83178eba4d5e935ac5c52313497c99a475d Nov 30 06:59:44 crc kubenswrapper[4941]: I1130 06:59:44.936370 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" event={"ID":"35bb24de-c515-4934-8de3-dce90b7d06ca","Type":"ContainerStarted","Data":"5f9e7f1e3da6b7dffa0a6af54155f83178eba4d5e935ac5c52313497c99a475d"} Nov 30 06:59:45 crc kubenswrapper[4941]: I1130 06:59:45.528252 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68192767-bd6a-4ea9-a040-902042ad77b2" path="/var/lib/kubelet/pods/68192767-bd6a-4ea9-a040-902042ad77b2/volumes" Nov 30 06:59:48 crc kubenswrapper[4941]: I1130 06:59:48.981663 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" event={"ID":"35bb24de-c515-4934-8de3-dce90b7d06ca","Type":"ContainerStarted","Data":"37377e2ed8a4627cf2996426b2abaeccb47eca624f48fe66e68b9d2341388a62"} Nov 30 06:59:48 crc kubenswrapper[4941]: I1130 06:59:48.981983 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 06:59:48 crc kubenswrapper[4941]: I1130 06:59:48.984041 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" event={"ID":"21e69639-56df-4de5-8339-c864a9864237","Type":"ContainerStarted","Data":"a1dc84ce6c8d3a3296c8edaa320f4d6fbd90c26e66b14398021d8f1bba4db07b"} Nov 30 06:59:48 crc kubenswrapper[4941]: I1130 06:59:48.984183 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 06:59:49 crc kubenswrapper[4941]: I1130 06:59:49.036401 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" podStartSLOduration=1.776960326 podStartE2EDuration="6.036368822s" podCreationTimestamp="2025-11-30 06:59:43 +0000 UTC" firstStartedPulling="2025-11-30 06:59:43.717630368 +0000 UTC m=+804.485801977" lastFinishedPulling="2025-11-30 06:59:47.977038844 +0000 UTC m=+808.745210473" observedRunningTime="2025-11-30 06:59:49.034297044 +0000 UTC m=+809.802468693" watchObservedRunningTime="2025-11-30 06:59:49.036368822 +0000 UTC m=+809.804540481" Nov 30 06:59:49 crc kubenswrapper[4941]: I1130 06:59:49.042720 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" podStartSLOduration=2.040698948 podStartE2EDuration="6.04269821s" podCreationTimestamp="2025-11-30 06:59:43 +0000 UTC" firstStartedPulling="2025-11-30 06:59:43.990853626 +0000 UTC m=+804.759025255" lastFinishedPulling="2025-11-30 06:59:47.992852908 +0000 UTC m=+808.761024517" observedRunningTime="2025-11-30 06:59:49.000769702 +0000 UTC m=+809.768941381" watchObservedRunningTime="2025-11-30 06:59:49.04269821 +0000 UTC m=+809.810869849" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.142754 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs"] Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.143920 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.145891 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.145969 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.160224 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs"] Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.239855 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5t5c7\" (UniqueName: \"kubernetes.io/projected/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-kube-api-access-5t5c7\") pod \"collect-profiles-29408100-kwtxs\" (UID: \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.239925 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-config-volume\") pod \"collect-profiles-29408100-kwtxs\" (UID: \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.239947 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-secret-volume\") pod \"collect-profiles-29408100-kwtxs\" (UID: \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.340748 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-config-volume\") pod \"collect-profiles-29408100-kwtxs\" (UID: \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.340791 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-secret-volume\") pod \"collect-profiles-29408100-kwtxs\" (UID: \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.340867 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5t5c7\" (UniqueName: \"kubernetes.io/projected/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-kube-api-access-5t5c7\") pod \"collect-profiles-29408100-kwtxs\" (UID: \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.342149 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-config-volume\") pod 
\"collect-profiles-29408100-kwtxs\" (UID: \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.347678 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-secret-volume\") pod \"collect-profiles-29408100-kwtxs\" (UID: \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.360192 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5t5c7\" (UniqueName: \"kubernetes.io/projected/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-kube-api-access-5t5c7\") pod \"collect-profiles-29408100-kwtxs\" (UID: \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.511862 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:00 crc kubenswrapper[4941]: I1130 07:00:00.696811 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs"] Nov 30 07:00:01 crc kubenswrapper[4941]: I1130 07:00:01.050001 4941 generic.go:334] "Generic (PLEG): container finished" podID="1f6d08f8-1b6d-4579-bde6-104b9c1aac9d" containerID="220fa5542fbb35a72334456ec34032f0c610688a8e0f506d8074c203299214b6" exitCode=0 Nov 30 07:00:01 crc kubenswrapper[4941]: I1130 07:00:01.050041 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" event={"ID":"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d","Type":"ContainerDied","Data":"220fa5542fbb35a72334456ec34032f0c610688a8e0f506d8074c203299214b6"} Nov 30 07:00:01 crc kubenswrapper[4941]: I1130 07:00:01.050272 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" event={"ID":"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d","Type":"ContainerStarted","Data":"ca442fe0c91835f2e3b1a3f928f3d18d939b9e0cd2eb8d9014169506ab5aa444"} Nov 30 07:00:02 crc kubenswrapper[4941]: I1130 07:00:02.277062 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:02 crc kubenswrapper[4941]: I1130 07:00:02.364092 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-secret-volume\") pod \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\" (UID: \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\") " Nov 30 07:00:02 crc kubenswrapper[4941]: I1130 07:00:02.364229 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-config-volume\") pod \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\" (UID: \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\") " Nov 30 07:00:02 crc kubenswrapper[4941]: I1130 07:00:02.364274 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5t5c7\" (UniqueName: \"kubernetes.io/projected/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-kube-api-access-5t5c7\") pod \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\" (UID: \"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d\") " Nov 30 07:00:02 crc kubenswrapper[4941]: I1130 07:00:02.365008 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-config-volume" (OuterVolumeSpecName: "config-volume") pod "1f6d08f8-1b6d-4579-bde6-104b9c1aac9d" (UID: "1f6d08f8-1b6d-4579-bde6-104b9c1aac9d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:00:02 crc kubenswrapper[4941]: I1130 07:00:02.370379 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-kube-api-access-5t5c7" (OuterVolumeSpecName: "kube-api-access-5t5c7") pod "1f6d08f8-1b6d-4579-bde6-104b9c1aac9d" (UID: "1f6d08f8-1b6d-4579-bde6-104b9c1aac9d"). InnerVolumeSpecName "kube-api-access-5t5c7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:00:02 crc kubenswrapper[4941]: I1130 07:00:02.376369 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1f6d08f8-1b6d-4579-bde6-104b9c1aac9d" (UID: "1f6d08f8-1b6d-4579-bde6-104b9c1aac9d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:00:02 crc kubenswrapper[4941]: I1130 07:00:02.465313 4941 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 30 07:00:02 crc kubenswrapper[4941]: I1130 07:00:02.465364 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-config-volume\") on node \"crc\" DevicePath \"\"" Nov 30 07:00:02 crc kubenswrapper[4941]: I1130 07:00:02.465374 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5t5c7\" (UniqueName: \"kubernetes.io/projected/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d-kube-api-access-5t5c7\") on node \"crc\" DevicePath \"\"" Nov 30 07:00:02 crc kubenswrapper[4941]: I1130 07:00:02.979053 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:00:02 crc kubenswrapper[4941]: I1130 07:00:02.979110 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:00:03 crc kubenswrapper[4941]: I1130 07:00:03.061728 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" event={"ID":"1f6d08f8-1b6d-4579-bde6-104b9c1aac9d","Type":"ContainerDied","Data":"ca442fe0c91835f2e3b1a3f928f3d18d939b9e0cd2eb8d9014169506ab5aa444"} Nov 30 07:00:03 crc kubenswrapper[4941]: I1130 07:00:03.061770 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca442fe0c91835f2e3b1a3f928f3d18d939b9e0cd2eb8d9014169506ab5aa444" Nov 30 07:00:03 crc kubenswrapper[4941]: I1130 07:00:03.061782 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs" Nov 30 07:00:03 crc kubenswrapper[4941]: I1130 07:00:03.765569 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-55cd88cb6c-wlwwj" Nov 30 07:00:23 crc kubenswrapper[4941]: I1130 07:00:23.451254 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-759d497878-7pn4c" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.288911 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl"] Nov 30 07:00:24 crc kubenswrapper[4941]: E1130 07:00:24.289190 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f6d08f8-1b6d-4579-bde6-104b9c1aac9d" containerName="collect-profiles" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.289207 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f6d08f8-1b6d-4579-bde6-104b9c1aac9d" containerName="collect-profiles" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.289319 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f6d08f8-1b6d-4579-bde6-104b9c1aac9d" containerName="collect-profiles" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.289715 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.292703 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.292768 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-68fsr" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.293409 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-zfmgz"] Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.295846 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.297743 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.301375 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.309058 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl"] Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.371586 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-rqtrj"] Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.372694 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-rqtrj" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.375359 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.376141 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.376506 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-t7jr5" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.382037 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-metrics-certs\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.382086 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-frr-startup\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.382113 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-reloader\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.382140 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgb7j\" (UniqueName: \"kubernetes.io/projected/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-kube-api-access-sgb7j\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.382160 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-frr-conf\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.382229 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7bb61fc6-70c2-43ee-ae35-e597b7033250-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-wbtcl\" (UID: \"7bb61fc6-70c2-43ee-ae35-e597b7033250\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.382497 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvpt4\" (UniqueName: \"kubernetes.io/projected/7bb61fc6-70c2-43ee-ae35-e597b7033250-kube-api-access-lvpt4\") pod \"frr-k8s-webhook-server-7fcb986d4-wbtcl\" (UID: \"7bb61fc6-70c2-43ee-ae35-e597b7033250\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.382504 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.382546 
4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-metrics\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.382698 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-frr-sockets\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.394795 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-8gdxg"] Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.396049 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-8gdxg" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.399339 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.407123 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-8gdxg"] Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.483704 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-frr-conf\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.483777 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-metallb-excludel2\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.483800 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/573f98db-7738-42ea-9668-ce64acfb316b-cert\") pod \"controller-f8648f98b-8gdxg\" (UID: \"573f98db-7738-42ea-9668-ce64acfb316b\") " pod="metallb-system/controller-f8648f98b-8gdxg" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.483818 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/573f98db-7738-42ea-9668-ce64acfb316b-metrics-certs\") pod \"controller-f8648f98b-8gdxg\" (UID: \"573f98db-7738-42ea-9668-ce64acfb316b\") " pod="metallb-system/controller-f8648f98b-8gdxg" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.483849 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7bb61fc6-70c2-43ee-ae35-e597b7033250-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-wbtcl\" (UID: \"7bb61fc6-70c2-43ee-ae35-e597b7033250\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.483956 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvpt4\" (UniqueName: 
\"kubernetes.io/projected/7bb61fc6-70c2-43ee-ae35-e597b7033250-kube-api-access-lvpt4\") pod \"frr-k8s-webhook-server-7fcb986d4-wbtcl\" (UID: \"7bb61fc6-70c2-43ee-ae35-e597b7033250\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.483997 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-metrics\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.484035 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-frr-sockets\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.484136 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-metrics-certs\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.484174 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fzm9\" (UniqueName: \"kubernetes.io/projected/573f98db-7738-42ea-9668-ce64acfb316b-kube-api-access-8fzm9\") pod \"controller-f8648f98b-8gdxg\" (UID: \"573f98db-7738-42ea-9668-ce64acfb316b\") " pod="metallb-system/controller-f8648f98b-8gdxg" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.484215 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdrc6\" (UniqueName: \"kubernetes.io/projected/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-kube-api-access-kdrc6\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.484232 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-frr-startup\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.484286 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-reloader\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.484314 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-metrics-certs\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.484352 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-memberlist\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") 
" pod="metallb-system/speaker-rqtrj" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.484386 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgb7j\" (UniqueName: \"kubernetes.io/projected/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-kube-api-access-sgb7j\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.485632 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-frr-startup\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.485827 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-reloader\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.484278 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-frr-conf\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.485857 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-metrics\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.486119 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-frr-sockets\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.493028 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-metrics-certs\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.500945 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvpt4\" (UniqueName: \"kubernetes.io/projected/7bb61fc6-70c2-43ee-ae35-e597b7033250-kube-api-access-lvpt4\") pod \"frr-k8s-webhook-server-7fcb986d4-wbtcl\" (UID: \"7bb61fc6-70c2-43ee-ae35-e597b7033250\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.501299 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgb7j\" (UniqueName: \"kubernetes.io/projected/b6c0593d-b6b1-422c-84ec-a5cf8726ee39-kube-api-access-sgb7j\") pod \"frr-k8s-zfmgz\" (UID: \"b6c0593d-b6b1-422c-84ec-a5cf8726ee39\") " pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.502306 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7bb61fc6-70c2-43ee-ae35-e597b7033250-cert\") pod 
\"frr-k8s-webhook-server-7fcb986d4-wbtcl\" (UID: \"7bb61fc6-70c2-43ee-ae35-e597b7033250\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.585545 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fzm9\" (UniqueName: \"kubernetes.io/projected/573f98db-7738-42ea-9668-ce64acfb316b-kube-api-access-8fzm9\") pod \"controller-f8648f98b-8gdxg\" (UID: \"573f98db-7738-42ea-9668-ce64acfb316b\") " pod="metallb-system/controller-f8648f98b-8gdxg" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.585616 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdrc6\" (UniqueName: \"kubernetes.io/projected/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-kube-api-access-kdrc6\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.585658 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-metrics-certs\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.585679 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-memberlist\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.585704 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-metallb-excludel2\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.585720 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/573f98db-7738-42ea-9668-ce64acfb316b-cert\") pod \"controller-f8648f98b-8gdxg\" (UID: \"573f98db-7738-42ea-9668-ce64acfb316b\") " pod="metallb-system/controller-f8648f98b-8gdxg" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.585738 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/573f98db-7738-42ea-9668-ce64acfb316b-metrics-certs\") pod \"controller-f8648f98b-8gdxg\" (UID: \"573f98db-7738-42ea-9668-ce64acfb316b\") " pod="metallb-system/controller-f8648f98b-8gdxg" Nov 30 07:00:24 crc kubenswrapper[4941]: E1130 07:00:24.586629 4941 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 30 07:00:24 crc kubenswrapper[4941]: E1130 07:00:24.586713 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-memberlist podName:fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b nodeName:}" failed. No retries permitted until 2025-11-30 07:00:25.086692774 +0000 UTC m=+845.854864383 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-memberlist") pod "speaker-rqtrj" (UID: "fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b") : secret "metallb-memberlist" not found Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.587250 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-metallb-excludel2\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.590402 4941 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.590497 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/573f98db-7738-42ea-9668-ce64acfb316b-metrics-certs\") pod \"controller-f8648f98b-8gdxg\" (UID: \"573f98db-7738-42ea-9668-ce64acfb316b\") " pod="metallb-system/controller-f8648f98b-8gdxg" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.591730 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-metrics-certs\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.602266 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdrc6\" (UniqueName: \"kubernetes.io/projected/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-kube-api-access-kdrc6\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.603093 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fzm9\" (UniqueName: \"kubernetes.io/projected/573f98db-7738-42ea-9668-ce64acfb316b-kube-api-access-8fzm9\") pod \"controller-f8648f98b-8gdxg\" (UID: \"573f98db-7738-42ea-9668-ce64acfb316b\") " pod="metallb-system/controller-f8648f98b-8gdxg" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.605109 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.606006 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/573f98db-7738-42ea-9668-ce64acfb316b-cert\") pod \"controller-f8648f98b-8gdxg\" (UID: \"573f98db-7738-42ea-9668-ce64acfb316b\") " pod="metallb-system/controller-f8648f98b-8gdxg" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.612120 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-zfmgz" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.709984 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-8gdxg" Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.871825 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl"] Nov 30 07:00:24 crc kubenswrapper[4941]: W1130 07:00:24.884411 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bb61fc6_70c2_43ee_ae35_e597b7033250.slice/crio-1e5b7b2426d2b76471e65162d3cf2cedd31d360969afc641cee03aa79e77a89a WatchSource:0}: Error finding container 1e5b7b2426d2b76471e65162d3cf2cedd31d360969afc641cee03aa79e77a89a: Status 404 returned error can't find the container with id 1e5b7b2426d2b76471e65162d3cf2cedd31d360969afc641cee03aa79e77a89a Nov 30 07:00:24 crc kubenswrapper[4941]: I1130 07:00:24.926248 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-8gdxg"] Nov 30 07:00:24 crc kubenswrapper[4941]: W1130 07:00:24.929907 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod573f98db_7738_42ea_9668_ce64acfb316b.slice/crio-475546cf04ff184194707b1cb1bb85ddc8a0d35168db31ff69ca97e30632da17 WatchSource:0}: Error finding container 475546cf04ff184194707b1cb1bb85ddc8a0d35168db31ff69ca97e30632da17: Status 404 returned error can't find the container with id 475546cf04ff184194707b1cb1bb85ddc8a0d35168db31ff69ca97e30632da17 Nov 30 07:00:25 crc kubenswrapper[4941]: I1130 07:00:25.093930 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-memberlist\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:25 crc kubenswrapper[4941]: E1130 07:00:25.094072 4941 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 30 07:00:25 crc kubenswrapper[4941]: E1130 07:00:25.094663 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-memberlist podName:fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b nodeName:}" failed. No retries permitted until 2025-11-30 07:00:26.094639525 +0000 UTC m=+846.862811134 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-memberlist") pod "speaker-rqtrj" (UID: "fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b") : secret "metallb-memberlist" not found Nov 30 07:00:25 crc kubenswrapper[4941]: I1130 07:00:25.198002 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-8gdxg" event={"ID":"573f98db-7738-42ea-9668-ce64acfb316b","Type":"ContainerStarted","Data":"2e1e997cc3268779343d1b4fb866f786945b37a255ca2bebeb4c5cd43a8cf46a"} Nov 30 07:00:25 crc kubenswrapper[4941]: I1130 07:00:25.198049 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-8gdxg" event={"ID":"573f98db-7738-42ea-9668-ce64acfb316b","Type":"ContainerStarted","Data":"7cf07fd64d3f00b8c06f92eb6fcfb7ae052b2a5ce99b212abde7a60f72481d03"} Nov 30 07:00:25 crc kubenswrapper[4941]: I1130 07:00:25.198061 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-8gdxg" event={"ID":"573f98db-7738-42ea-9668-ce64acfb316b","Type":"ContainerStarted","Data":"475546cf04ff184194707b1cb1bb85ddc8a0d35168db31ff69ca97e30632da17"} Nov 30 07:00:25 crc kubenswrapper[4941]: I1130 07:00:25.198156 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-8gdxg" Nov 30 07:00:25 crc kubenswrapper[4941]: I1130 07:00:25.199523 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zfmgz" event={"ID":"b6c0593d-b6b1-422c-84ec-a5cf8726ee39","Type":"ContainerStarted","Data":"5ef1a180909ed6741adb81d72b37c7356b8090d4abd075c466863b36b9ef6fa3"} Nov 30 07:00:25 crc kubenswrapper[4941]: I1130 07:00:25.200665 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl" event={"ID":"7bb61fc6-70c2-43ee-ae35-e597b7033250","Type":"ContainerStarted","Data":"1e5b7b2426d2b76471e65162d3cf2cedd31d360969afc641cee03aa79e77a89a"} Nov 30 07:00:25 crc kubenswrapper[4941]: I1130 07:00:25.219562 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-8gdxg" podStartSLOduration=1.219535378 podStartE2EDuration="1.219535378s" podCreationTimestamp="2025-11-30 07:00:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:00:25.21578913 +0000 UTC m=+845.983960749" watchObservedRunningTime="2025-11-30 07:00:25.219535378 +0000 UTC m=+845.987706997" Nov 30 07:00:26 crc kubenswrapper[4941]: I1130 07:00:26.107618 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-memberlist\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:26 crc kubenswrapper[4941]: I1130 07:00:26.114936 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b-memberlist\") pod \"speaker-rqtrj\" (UID: \"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b\") " pod="metallb-system/speaker-rqtrj" Nov 30 07:00:26 crc kubenswrapper[4941]: I1130 07:00:26.187175 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-rqtrj" Nov 30 07:00:26 crc kubenswrapper[4941]: W1130 07:00:26.216153 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfdc7052b_fe5b_416d_b1d4_6f0ccfd8193b.slice/crio-04cd46e4bb63e6d294e31cd53ba56b6c189a173d5ed43e5db5d4197e796b79c8 WatchSource:0}: Error finding container 04cd46e4bb63e6d294e31cd53ba56b6c189a173d5ed43e5db5d4197e796b79c8: Status 404 returned error can't find the container with id 04cd46e4bb63e6d294e31cd53ba56b6c189a173d5ed43e5db5d4197e796b79c8 Nov 30 07:00:27 crc kubenswrapper[4941]: I1130 07:00:27.216500 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rqtrj" event={"ID":"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b","Type":"ContainerStarted","Data":"30621df954065aea3eda8992203c673684d88a4c6c878899803724c7019b4a6a"} Nov 30 07:00:27 crc kubenswrapper[4941]: I1130 07:00:27.216546 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rqtrj" event={"ID":"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b","Type":"ContainerStarted","Data":"96a87a6867f6f82a61edf232414dda32b9278ea5891fc98a89ad43d98b55dd22"} Nov 30 07:00:27 crc kubenswrapper[4941]: I1130 07:00:27.216557 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rqtrj" event={"ID":"fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b","Type":"ContainerStarted","Data":"04cd46e4bb63e6d294e31cd53ba56b6c189a173d5ed43e5db5d4197e796b79c8"} Nov 30 07:00:27 crc kubenswrapper[4941]: I1130 07:00:27.216747 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-rqtrj" Nov 30 07:00:27 crc kubenswrapper[4941]: I1130 07:00:27.238023 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-rqtrj" podStartSLOduration=3.238004128 podStartE2EDuration="3.238004128s" podCreationTimestamp="2025-11-30 07:00:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:00:27.235092737 +0000 UTC m=+848.003264346" watchObservedRunningTime="2025-11-30 07:00:27.238004128 +0000 UTC m=+848.006175737" Nov 30 07:00:32 crc kubenswrapper[4941]: I1130 07:00:32.245655 4941 generic.go:334] "Generic (PLEG): container finished" podID="b6c0593d-b6b1-422c-84ec-a5cf8726ee39" containerID="61ad34620653f874c53d4664b9374275962088419c43199cedaad10b0695e645" exitCode=0 Nov 30 07:00:32 crc kubenswrapper[4941]: I1130 07:00:32.245711 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zfmgz" event={"ID":"b6c0593d-b6b1-422c-84ec-a5cf8726ee39","Type":"ContainerDied","Data":"61ad34620653f874c53d4664b9374275962088419c43199cedaad10b0695e645"} Nov 30 07:00:32 crc kubenswrapper[4941]: I1130 07:00:32.247987 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl" event={"ID":"7bb61fc6-70c2-43ee-ae35-e597b7033250","Type":"ContainerStarted","Data":"e6273ecedbfc028b61e39163c1a3e9cfcb6f72273686bbcfeafaef8a5dff5fd5"} Nov 30 07:00:32 crc kubenswrapper[4941]: I1130 07:00:32.248205 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl" Nov 30 07:00:32 crc kubenswrapper[4941]: I1130 07:00:32.303255 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl" podStartSLOduration=1.293757809 
Nov 30 07:00:32 crc kubenswrapper[4941]: I1130 07:00:32.978077 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 07:00:32 crc kubenswrapper[4941]: I1130 07:00:32.978135 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 07:00:32 crc kubenswrapper[4941]: I1130 07:00:32.978192 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 07:00:32 crc kubenswrapper[4941]: I1130 07:00:32.978823 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"88a82781e74dc63c736752840ee31da64c053c5a7d4b1a678036abaa19f971dc"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 07:00:32 crc kubenswrapper[4941]: I1130 07:00:32.978887 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://88a82781e74dc63c736752840ee31da64c053c5a7d4b1a678036abaa19f971dc" gracePeriod=600
Nov 30 07:00:33 crc kubenswrapper[4941]: I1130 07:00:33.255979 4941 generic.go:334] "Generic (PLEG): container finished" podID="b6c0593d-b6b1-422c-84ec-a5cf8726ee39" containerID="4e4ea00aef5d73ddfcfa2a37ce001d9a2dd7955ba5626b4014aafcdd8610076c" exitCode=0
Nov 30 07:00:33 crc kubenswrapper[4941]: I1130 07:00:33.256112 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zfmgz" event={"ID":"b6c0593d-b6b1-422c-84ec-a5cf8726ee39","Type":"ContainerDied","Data":"4e4ea00aef5d73ddfcfa2a37ce001d9a2dd7955ba5626b4014aafcdd8610076c"}
Nov 30 07:00:33 crc kubenswrapper[4941]: I1130 07:00:33.260947 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="88a82781e74dc63c736752840ee31da64c053c5a7d4b1a678036abaa19f971dc" exitCode=0
Nov 30 07:00:33 crc kubenswrapper[4941]: I1130 07:00:33.261037 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"88a82781e74dc63c736752840ee31da64c053c5a7d4b1a678036abaa19f971dc"}
Nov 30 07:00:33 crc kubenswrapper[4941]: I1130 07:00:33.261107 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"c56f9f6e36d888e40d67920be6ce9775fda0a6740b4e0f2b802e64b1e8fb285b"}
Nov 30 07:00:33 crc kubenswrapper[4941]: I1130 07:00:33.261141 4941 scope.go:117] "RemoveContainer" containerID="2999b81d3c5dcc4cb61fb09cb18df1d902ada4e3797e913664d663ea2105dfb2"
Nov 30 07:00:34 crc kubenswrapper[4941]: I1130 07:00:34.272212 4941 generic.go:334] "Generic (PLEG): container finished" podID="b6c0593d-b6b1-422c-84ec-a5cf8726ee39" containerID="81b74ac1923c7af3074bdff44a3ede702cb2b8bc0ba7b0830594304c6212e4c9" exitCode=0
Nov 30 07:00:34 crc kubenswrapper[4941]: I1130 07:00:34.272262 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zfmgz" event={"ID":"b6c0593d-b6b1-422c-84ec-a5cf8726ee39","Type":"ContainerDied","Data":"81b74ac1923c7af3074bdff44a3ede702cb2b8bc0ba7b0830594304c6212e4c9"}
Nov 30 07:00:35 crc kubenswrapper[4941]: I1130 07:00:35.285768 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zfmgz" event={"ID":"b6c0593d-b6b1-422c-84ec-a5cf8726ee39","Type":"ContainerStarted","Data":"7214d0755162a34b12359970c7c4499c8c169349f9cca86c54e543761acae1ce"}
Nov 30 07:00:35 crc kubenswrapper[4941]: I1130 07:00:35.286344 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zfmgz" event={"ID":"b6c0593d-b6b1-422c-84ec-a5cf8726ee39","Type":"ContainerStarted","Data":"e2c4e9ae2c46d4400e339459d927d74ec41419d40c02e260b65fc731aeb56b94"}
Nov 30 07:00:35 crc kubenswrapper[4941]: I1130 07:00:35.286357 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zfmgz" event={"ID":"b6c0593d-b6b1-422c-84ec-a5cf8726ee39","Type":"ContainerStarted","Data":"1e19d642b7d1af03ffe60b795c1d3e2a4a04b7cc5f9db36e6d49f8f1264ec232"}
Nov 30 07:00:35 crc kubenswrapper[4941]: I1130 07:00:35.286366 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zfmgz" event={"ID":"b6c0593d-b6b1-422c-84ec-a5cf8726ee39","Type":"ContainerStarted","Data":"edf5a9dcfede1dd88df166db40b6fd81adff6890d4798681197afe748a385439"}
Nov 30 07:00:35 crc kubenswrapper[4941]: I1130 07:00:35.286376 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zfmgz" event={"ID":"b6c0593d-b6b1-422c-84ec-a5cf8726ee39","Type":"ContainerStarted","Data":"ae59223548e0046a141c2207c19091c3d9b62a2936b512c02f71e61640f21d8c"}
Nov 30 07:00:36 crc kubenswrapper[4941]: I1130 07:00:36.191250 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-rqtrj"
Nov 30 07:00:36 crc kubenswrapper[4941]: I1130 07:00:36.296164 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zfmgz" event={"ID":"b6c0593d-b6b1-422c-84ec-a5cf8726ee39","Type":"ContainerStarted","Data":"15f48c017d02f8a8d4f471c97e8f74c722619f9c5db6e428c434c92bae887ba7"}
Nov 30 07:00:36 crc kubenswrapper[4941]: I1130 07:00:36.297168 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-zfmgz"
Nov 30 07:00:36 crc kubenswrapper[4941]: I1130 07:00:36.320045 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-zfmgz" podStartSLOduration=5.190456765 podStartE2EDuration="12.320021508s" podCreationTimestamp="2025-11-30 07:00:24 +0000 UTC" firstStartedPulling="2025-11-30 07:00:24.748423943 +0000 UTC m=+845.516595552" lastFinishedPulling="2025-11-30 07:00:31.877988686 +0000 UTC m=+852.646160295" observedRunningTime="2025-11-30 07:00:36.317711975 +0000 UTC m=+857.085883604" watchObservedRunningTime="2025-11-30 07:00:36.320021508 +0000 UTC m=+857.088193127"
Nov 30 07:00:37 crc kubenswrapper[4941]: I1130 07:00:37.975215 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"]
Nov 30 07:00:37 crc kubenswrapper[4941]: I1130 07:00:37.977576 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:37 crc kubenswrapper[4941]: I1130 07:00:37.981987 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Nov 30 07:00:37 crc kubenswrapper[4941]: I1130 07:00:37.986764 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"]
Nov 30 07:00:38 crc kubenswrapper[4941]: I1130 07:00:38.101525 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq98k\" (UniqueName: \"kubernetes.io/projected/75189b59-7338-40b9-a1be-5a7e35cabdf6-kube-api-access-rq98k\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf\" (UID: \"75189b59-7338-40b9-a1be-5a7e35cabdf6\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:38 crc kubenswrapper[4941]: I1130 07:00:38.101824 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/75189b59-7338-40b9-a1be-5a7e35cabdf6-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf\" (UID: \"75189b59-7338-40b9-a1be-5a7e35cabdf6\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:38 crc kubenswrapper[4941]: I1130 07:00:38.101910 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/75189b59-7338-40b9-a1be-5a7e35cabdf6-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf\" (UID: \"75189b59-7338-40b9-a1be-5a7e35cabdf6\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:38 crc kubenswrapper[4941]: I1130 07:00:38.203018 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq98k\" (UniqueName: \"kubernetes.io/projected/75189b59-7338-40b9-a1be-5a7e35cabdf6-kube-api-access-rq98k\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf\" (UID: \"75189b59-7338-40b9-a1be-5a7e35cabdf6\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:38 crc kubenswrapper[4941]: I1130 07:00:38.203086 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/75189b59-7338-40b9-a1be-5a7e35cabdf6-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf\" (UID: \"75189b59-7338-40b9-a1be-5a7e35cabdf6\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:38 crc kubenswrapper[4941]: I1130 07:00:38.203108 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/75189b59-7338-40b9-a1be-5a7e35cabdf6-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf\" (UID: \"75189b59-7338-40b9-a1be-5a7e35cabdf6\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:38 crc kubenswrapper[4941]: I1130 07:00:38.203626 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/75189b59-7338-40b9-a1be-5a7e35cabdf6-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf\" (UID: \"75189b59-7338-40b9-a1be-5a7e35cabdf6\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:38 crc kubenswrapper[4941]: I1130 07:00:38.203879 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/75189b59-7338-40b9-a1be-5a7e35cabdf6-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf\" (UID: \"75189b59-7338-40b9-a1be-5a7e35cabdf6\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:38 crc kubenswrapper[4941]: I1130 07:00:38.222032 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq98k\" (UniqueName: \"kubernetes.io/projected/75189b59-7338-40b9-a1be-5a7e35cabdf6-kube-api-access-rq98k\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf\" (UID: \"75189b59-7338-40b9-a1be-5a7e35cabdf6\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:38 crc kubenswrapper[4941]: I1130 07:00:38.298606 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:38 crc kubenswrapper[4941]: I1130 07:00:38.703385 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"]
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.133586 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tdh7n"]
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.136254 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.151668 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tdh7n"]
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.216841 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vrtk\" (UniqueName: \"kubernetes.io/projected/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-kube-api-access-6vrtk\") pod \"community-operators-tdh7n\" (UID: \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\") " pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.216986 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-utilities\") pod \"community-operators-tdh7n\" (UID: \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\") " pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.217127 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-catalog-content\") pod \"community-operators-tdh7n\" (UID: \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\") " pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.317889 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vrtk\" (UniqueName: \"kubernetes.io/projected/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-kube-api-access-6vrtk\") pod \"community-operators-tdh7n\" (UID: \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\") " pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.317992 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-utilities\") pod \"community-operators-tdh7n\" (UID: \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\") " pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.318063 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-catalog-content\") pod \"community-operators-tdh7n\" (UID: \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\") " pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.318602 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-catalog-content\") pod \"community-operators-tdh7n\" (UID: \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\") " pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.318603 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-utilities\") pod \"community-operators-tdh7n\" (UID: \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\") " pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.321552 4941 generic.go:334] "Generic (PLEG): container finished" podID="75189b59-7338-40b9-a1be-5a7e35cabdf6" containerID="2de4096c849df0c4512f6c72beea98dd6153ee7e0791591f4769bee22c7eb7c7" exitCode=0
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.321602 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf" event={"ID":"75189b59-7338-40b9-a1be-5a7e35cabdf6","Type":"ContainerDied","Data":"2de4096c849df0c4512f6c72beea98dd6153ee7e0791591f4769bee22c7eb7c7"}
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.321634 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf" event={"ID":"75189b59-7338-40b9-a1be-5a7e35cabdf6","Type":"ContainerStarted","Data":"d60f049ca0969101c3ab261de0267d482b551c905e0d1881f2c16ef2d0d42230"}
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.354457 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vrtk\" (UniqueName: \"kubernetes.io/projected/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-kube-api-access-6vrtk\") pod \"community-operators-tdh7n\" (UID: \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\") " pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.450971 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.615893 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-zfmgz"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.663665 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-zfmgz"
Nov 30 07:00:39 crc kubenswrapper[4941]: I1130 07:00:39.986060 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tdh7n"]
Nov 30 07:00:40 crc kubenswrapper[4941]: I1130 07:00:40.330593 4941 generic.go:334] "Generic (PLEG): container finished" podID="619f8d8f-0548-4ae8-8d4e-8f35172da2f9" containerID="dbcdbf7743dfda5fa5381e7c61bcdb0b285fb0ed6517d6176f8d51c8b82231dd" exitCode=0
Nov 30 07:00:40 crc kubenswrapper[4941]: I1130 07:00:40.330687 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdh7n" event={"ID":"619f8d8f-0548-4ae8-8d4e-8f35172da2f9","Type":"ContainerDied","Data":"dbcdbf7743dfda5fa5381e7c61bcdb0b285fb0ed6517d6176f8d51c8b82231dd"}
Nov 30 07:00:40 crc kubenswrapper[4941]: I1130 07:00:40.331583 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdh7n" event={"ID":"619f8d8f-0548-4ae8-8d4e-8f35172da2f9","Type":"ContainerStarted","Data":"e7e86ab2810cb408afd2216e3d43b69c679976b704c3302318bbd1f4fbe9a3ca"}
Nov 30 07:00:43 crc kubenswrapper[4941]: I1130 07:00:43.353191 4941 generic.go:334] "Generic (PLEG): container finished" podID="75189b59-7338-40b9-a1be-5a7e35cabdf6" containerID="c1f8a06e9f3e1f8aea86f75e4cb352666f68ae1bac080b946cdfff283f0dbe32" exitCode=0
Nov 30 07:00:43 crc kubenswrapper[4941]: I1130 07:00:43.353306 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf" event={"ID":"75189b59-7338-40b9-a1be-5a7e35cabdf6","Type":"ContainerDied","Data":"c1f8a06e9f3e1f8aea86f75e4cb352666f68ae1bac080b946cdfff283f0dbe32"}
Nov 30 07:00:43 crc kubenswrapper[4941]: I1130 07:00:43.357918 4941 generic.go:334] "Generic (PLEG): container finished" podID="619f8d8f-0548-4ae8-8d4e-8f35172da2f9" containerID="52f022918ceb910730a9358b9a9bcc082aaea101ef6e8d82b268721eda05c70f" exitCode=0
Nov 30 07:00:43 crc kubenswrapper[4941]: I1130 07:00:43.357996 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdh7n" event={"ID":"619f8d8f-0548-4ae8-8d4e-8f35172da2f9","Type":"ContainerDied","Data":"52f022918ceb910730a9358b9a9bcc082aaea101ef6e8d82b268721eda05c70f"}
Nov 30 07:00:44 crc kubenswrapper[4941]: I1130 07:00:44.365591 4941 generic.go:334] "Generic (PLEG): container finished" podID="75189b59-7338-40b9-a1be-5a7e35cabdf6" containerID="f9bbc736a1e55d71b4d344d416cd7368e906596303446a86471d8851513ba07f" exitCode=0
Nov 30 07:00:44 crc kubenswrapper[4941]: I1130 07:00:44.365667 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf" event={"ID":"75189b59-7338-40b9-a1be-5a7e35cabdf6","Type":"ContainerDied","Data":"f9bbc736a1e55d71b4d344d416cd7368e906596303446a86471d8851513ba07f"}
Nov 30 07:00:44 crc kubenswrapper[4941]: I1130 07:00:44.368163 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdh7n" event={"ID":"619f8d8f-0548-4ae8-8d4e-8f35172da2f9","Type":"ContainerStarted","Data":"f8b3b02d6e2cdfdcc3fa997e4a59731bcf223d099192803fd4ba259d6abef918"}
Nov 30 07:00:44 crc kubenswrapper[4941]: I1130 07:00:44.401435 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tdh7n" podStartSLOduration=1.5953778349999999 podStartE2EDuration="5.401414124s" podCreationTimestamp="2025-11-30 07:00:39 +0000 UTC" firstStartedPulling="2025-11-30 07:00:40.332260882 +0000 UTC m=+861.100432491" lastFinishedPulling="2025-11-30 07:00:44.138297171 +0000 UTC m=+864.906468780" observedRunningTime="2025-11-30 07:00:44.396874431 +0000 UTC m=+865.165046050" watchObservedRunningTime="2025-11-30 07:00:44.401414124 +0000 UTC m=+865.169585733"
Nov 30 07:00:44 crc kubenswrapper[4941]: I1130 07:00:44.610829 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wbtcl"
Nov 30 07:00:44 crc kubenswrapper[4941]: I1130 07:00:44.617028 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-zfmgz"
Nov 30 07:00:44 crc kubenswrapper[4941]: I1130 07:00:44.713872 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-8gdxg"
Nov 30 07:00:45 crc kubenswrapper[4941]: I1130 07:00:45.621642 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:45 crc kubenswrapper[4941]: I1130 07:00:45.720038 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/75189b59-7338-40b9-a1be-5a7e35cabdf6-util\") pod \"75189b59-7338-40b9-a1be-5a7e35cabdf6\" (UID: \"75189b59-7338-40b9-a1be-5a7e35cabdf6\") "
Nov 30 07:00:45 crc kubenswrapper[4941]: I1130 07:00:45.720184 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq98k\" (UniqueName: \"kubernetes.io/projected/75189b59-7338-40b9-a1be-5a7e35cabdf6-kube-api-access-rq98k\") pod \"75189b59-7338-40b9-a1be-5a7e35cabdf6\" (UID: \"75189b59-7338-40b9-a1be-5a7e35cabdf6\") "
Nov 30 07:00:45 crc kubenswrapper[4941]: I1130 07:00:45.721196 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/75189b59-7338-40b9-a1be-5a7e35cabdf6-bundle\") pod \"75189b59-7338-40b9-a1be-5a7e35cabdf6\" (UID: \"75189b59-7338-40b9-a1be-5a7e35cabdf6\") "
Nov 30 07:00:45 crc kubenswrapper[4941]: I1130 07:00:45.722138 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75189b59-7338-40b9-a1be-5a7e35cabdf6-bundle" (OuterVolumeSpecName: "bundle") pod "75189b59-7338-40b9-a1be-5a7e35cabdf6" (UID: "75189b59-7338-40b9-a1be-5a7e35cabdf6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:00:45 crc kubenswrapper[4941]: I1130 07:00:45.731466 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75189b59-7338-40b9-a1be-5a7e35cabdf6-util" (OuterVolumeSpecName: "util") pod "75189b59-7338-40b9-a1be-5a7e35cabdf6" (UID: "75189b59-7338-40b9-a1be-5a7e35cabdf6"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:00:45 crc kubenswrapper[4941]: I1130 07:00:45.732609 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75189b59-7338-40b9-a1be-5a7e35cabdf6-kube-api-access-rq98k" (OuterVolumeSpecName: "kube-api-access-rq98k") pod "75189b59-7338-40b9-a1be-5a7e35cabdf6" (UID: "75189b59-7338-40b9-a1be-5a7e35cabdf6"). InnerVolumeSpecName "kube-api-access-rq98k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:00:45 crc kubenswrapper[4941]: I1130 07:00:45.822742 4941 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/75189b59-7338-40b9-a1be-5a7e35cabdf6-util\") on node \"crc\" DevicePath \"\""
Nov 30 07:00:45 crc kubenswrapper[4941]: I1130 07:00:45.822770 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq98k\" (UniqueName: \"kubernetes.io/projected/75189b59-7338-40b9-a1be-5a7e35cabdf6-kube-api-access-rq98k\") on node \"crc\" DevicePath \"\""
Nov 30 07:00:45 crc kubenswrapper[4941]: I1130 07:00:45.822782 4941 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/75189b59-7338-40b9-a1be-5a7e35cabdf6-bundle\") on node \"crc\" DevicePath \"\""
Nov 30 07:00:46 crc kubenswrapper[4941]: I1130 07:00:46.380867 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf" event={"ID":"75189b59-7338-40b9-a1be-5a7e35cabdf6","Type":"ContainerDied","Data":"d60f049ca0969101c3ab261de0267d482b551c905e0d1881f2c16ef2d0d42230"}
Nov 30 07:00:46 crc kubenswrapper[4941]: I1130 07:00:46.380920 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf"
Nov 30 07:00:46 crc kubenswrapper[4941]: I1130 07:00:46.380933 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d60f049ca0969101c3ab261de0267d482b551c905e0d1881f2c16ef2d0d42230"
Nov 30 07:00:49 crc kubenswrapper[4941]: I1130 07:00:49.451893 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:49 crc kubenswrapper[4941]: I1130 07:00:49.452213 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:49 crc kubenswrapper[4941]: I1130 07:00:49.500919 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:50 crc kubenswrapper[4941]: I1130 07:00:50.488297 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.213230 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw"]
Nov 30 07:00:51 crc kubenswrapper[4941]: E1130 07:00:51.213520 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75189b59-7338-40b9-a1be-5a7e35cabdf6" containerName="extract"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.213537 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="75189b59-7338-40b9-a1be-5a7e35cabdf6" containerName="extract"
Nov 30 07:00:51 crc kubenswrapper[4941]: E1130 07:00:51.213560 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75189b59-7338-40b9-a1be-5a7e35cabdf6" containerName="util"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.213568 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="75189b59-7338-40b9-a1be-5a7e35cabdf6" containerName="util"
Nov 30 07:00:51 crc kubenswrapper[4941]: E1130 07:00:51.213581 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75189b59-7338-40b9-a1be-5a7e35cabdf6" containerName="pull"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.213589 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="75189b59-7338-40b9-a1be-5a7e35cabdf6" containerName="pull"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.213736 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="75189b59-7338-40b9-a1be-5a7e35cabdf6" containerName="extract"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.214173 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.216339 4941 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-l276t"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.216673 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.216857 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.228883 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw"]
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.297962 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g646d\" (UniqueName: \"kubernetes.io/projected/a81acc01-279b-4b01-a6b7-8015de8fe5eb-kube-api-access-g646d\") pod \"cert-manager-operator-controller-manager-64cf6dff88-rwjqw\" (UID: \"a81acc01-279b-4b01-a6b7-8015de8fe5eb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.298044 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a81acc01-279b-4b01-a6b7-8015de8fe5eb-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-rwjqw\" (UID: \"a81acc01-279b-4b01-a6b7-8015de8fe5eb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.399342 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a81acc01-279b-4b01-a6b7-8015de8fe5eb-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-rwjqw\" (UID: \"a81acc01-279b-4b01-a6b7-8015de8fe5eb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.399489 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g646d\" (UniqueName: \"kubernetes.io/projected/a81acc01-279b-4b01-a6b7-8015de8fe5eb-kube-api-access-g646d\") pod \"cert-manager-operator-controller-manager-64cf6dff88-rwjqw\" (UID: \"a81acc01-279b-4b01-a6b7-8015de8fe5eb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.400751 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a81acc01-279b-4b01-a6b7-8015de8fe5eb-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-rwjqw\" (UID: \"a81acc01-279b-4b01-a6b7-8015de8fe5eb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.421926 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g646d\" (UniqueName: \"kubernetes.io/projected/a81acc01-279b-4b01-a6b7-8015de8fe5eb-kube-api-access-g646d\") pod \"cert-manager-operator-controller-manager-64cf6dff88-rwjqw\" (UID: \"a81acc01-279b-4b01-a6b7-8015de8fe5eb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.546798 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw"
Nov 30 07:00:51 crc kubenswrapper[4941]: I1130 07:00:51.866478 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw"]
Nov 30 07:00:51 crc kubenswrapper[4941]: W1130 07:00:51.877851 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda81acc01_279b_4b01_a6b7_8015de8fe5eb.slice/crio-2f02fa077f6338c31897ea46160b5af1f2b6f1aa32f60d9a0647e7ccffdab175 WatchSource:0}: Error finding container 2f02fa077f6338c31897ea46160b5af1f2b6f1aa32f60d9a0647e7ccffdab175: Status 404 returned error can't find the container with id 2f02fa077f6338c31897ea46160b5af1f2b6f1aa32f60d9a0647e7ccffdab175
Nov 30 07:00:52 crc kubenswrapper[4941]: I1130 07:00:52.423600 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw" event={"ID":"a81acc01-279b-4b01-a6b7-8015de8fe5eb","Type":"ContainerStarted","Data":"2f02fa077f6338c31897ea46160b5af1f2b6f1aa32f60d9a0647e7ccffdab175"}
Nov 30 07:00:53 crc kubenswrapper[4941]: I1130 07:00:53.533445 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tdh7n"]
Nov 30 07:00:53 crc kubenswrapper[4941]: I1130 07:00:53.534134 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tdh7n" podUID="619f8d8f-0548-4ae8-8d4e-8f35172da2f9" containerName="registry-server" containerID="cri-o://f8b3b02d6e2cdfdcc3fa997e4a59731bcf223d099192803fd4ba259d6abef918" gracePeriod=2
Nov 30 07:00:54 crc kubenswrapper[4941]: I1130 07:00:54.446234 4941 generic.go:334] "Generic (PLEG): container finished" podID="619f8d8f-0548-4ae8-8d4e-8f35172da2f9" containerID="f8b3b02d6e2cdfdcc3fa997e4a59731bcf223d099192803fd4ba259d6abef918" exitCode=0
Nov 30 07:00:54 crc kubenswrapper[4941]: I1130 07:00:54.446271 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdh7n" event={"ID":"619f8d8f-0548-4ae8-8d4e-8f35172da2f9","Type":"ContainerDied","Data":"f8b3b02d6e2cdfdcc3fa997e4a59731bcf223d099192803fd4ba259d6abef918"}
Nov 30 07:00:54 crc kubenswrapper[4941]: I1130 07:00:54.500300 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:54 crc kubenswrapper[4941]: I1130 07:00:54.652128 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vrtk\" (UniqueName: \"kubernetes.io/projected/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-kube-api-access-6vrtk\") pod \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\" (UID: \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\") "
Nov 30 07:00:54 crc kubenswrapper[4941]: I1130 07:00:54.652972 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-utilities\") pod \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\" (UID: \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\") "
Nov 30 07:00:54 crc kubenswrapper[4941]: I1130 07:00:54.655185 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-utilities" (OuterVolumeSpecName: "utilities") pod "619f8d8f-0548-4ae8-8d4e-8f35172da2f9" (UID: "619f8d8f-0548-4ae8-8d4e-8f35172da2f9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:00:54 crc kubenswrapper[4941]: I1130 07:00:54.655317 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-catalog-content\") pod \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\" (UID: \"619f8d8f-0548-4ae8-8d4e-8f35172da2f9\") "
Nov 30 07:00:54 crc kubenswrapper[4941]: I1130 07:00:54.656116 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 07:00:54 crc kubenswrapper[4941]: I1130 07:00:54.661587 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-kube-api-access-6vrtk" (OuterVolumeSpecName: "kube-api-access-6vrtk") pod "619f8d8f-0548-4ae8-8d4e-8f35172da2f9" (UID: "619f8d8f-0548-4ae8-8d4e-8f35172da2f9"). InnerVolumeSpecName "kube-api-access-6vrtk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:00:54 crc kubenswrapper[4941]: I1130 07:00:54.709382 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "619f8d8f-0548-4ae8-8d4e-8f35172da2f9" (UID: "619f8d8f-0548-4ae8-8d4e-8f35172da2f9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:00:54 crc kubenswrapper[4941]: I1130 07:00:54.757086 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 07:00:54 crc kubenswrapper[4941]: I1130 07:00:54.757121 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vrtk\" (UniqueName: \"kubernetes.io/projected/619f8d8f-0548-4ae8-8d4e-8f35172da2f9-kube-api-access-6vrtk\") on node \"crc\" DevicePath \"\""
Nov 30 07:00:55 crc kubenswrapper[4941]: I1130 07:00:55.459645 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdh7n" event={"ID":"619f8d8f-0548-4ae8-8d4e-8f35172da2f9","Type":"ContainerDied","Data":"e7e86ab2810cb408afd2216e3d43b69c679976b704c3302318bbd1f4fbe9a3ca"}
Nov 30 07:00:55 crc kubenswrapper[4941]: I1130 07:00:55.459706 4941 scope.go:117] "RemoveContainer" containerID="f8b3b02d6e2cdfdcc3fa997e4a59731bcf223d099192803fd4ba259d6abef918"
Nov 30 07:00:55 crc kubenswrapper[4941]: I1130 07:00:55.459723 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tdh7n"
Nov 30 07:00:55 crc kubenswrapper[4941]: I1130 07:00:55.574006 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tdh7n"]
Nov 30 07:00:55 crc kubenswrapper[4941]: I1130 07:00:55.577942 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tdh7n"]
Nov 30 07:00:57 crc kubenswrapper[4941]: I1130 07:00:57.530310 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="619f8d8f-0548-4ae8-8d4e-8f35172da2f9" path="/var/lib/kubelet/pods/619f8d8f-0548-4ae8-8d4e-8f35172da2f9/volumes"
Nov 30 07:00:59 crc kubenswrapper[4941]: I1130 07:00:59.587378 4941 scope.go:117] "RemoveContainer" containerID="52f022918ceb910730a9358b9a9bcc082aaea101ef6e8d82b268721eda05c70f"
Nov 30 07:00:59 crc kubenswrapper[4941]: I1130 07:00:59.634859 4941 scope.go:117] "RemoveContainer" containerID="dbcdbf7743dfda5fa5381e7c61bcdb0b285fb0ed6517d6176f8d51c8b82231dd"
Nov 30 07:01:00 crc kubenswrapper[4941]: I1130 07:01:00.502836 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw" event={"ID":"a81acc01-279b-4b01-a6b7-8015de8fe5eb","Type":"ContainerStarted","Data":"995a386a0b8b4703eea3033e9da0d1ba8cee12035549febcc1cb0c189657b86e"}
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.808420 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-rwjqw" podStartSLOduration=5.003274039 podStartE2EDuration="12.808404859s" podCreationTimestamp="2025-11-30 07:00:51 +0000 UTC" firstStartedPulling="2025-11-30 07:00:51.882107144 +0000 UTC m=+872.650278743" lastFinishedPulling="2025-11-30 07:00:59.687237954 +0000 UTC m=+880.455409563" observedRunningTime="2025-11-30 07:01:00.541367178 +0000 UTC m=+881.309538797" watchObservedRunningTime="2025-11-30 07:01:03.808404859 +0000 UTC m=+884.576576468"
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.812389 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-lbhgf"]
Nov 30 07:01:03 crc kubenswrapper[4941]: E1130 07:01:03.812592 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="619f8d8f-0548-4ae8-8d4e-8f35172da2f9" containerName="extract-content"
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.812608 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="619f8d8f-0548-4ae8-8d4e-8f35172da2f9" containerName="extract-content"
Nov 30 07:01:03 crc kubenswrapper[4941]: E1130 07:01:03.812624 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="619f8d8f-0548-4ae8-8d4e-8f35172da2f9" containerName="extract-utilities"
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.812630 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="619f8d8f-0548-4ae8-8d4e-8f35172da2f9" containerName="extract-utilities"
Nov 30 07:01:03 crc kubenswrapper[4941]: E1130 07:01:03.812646 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="619f8d8f-0548-4ae8-8d4e-8f35172da2f9" containerName="registry-server"
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.812652 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="619f8d8f-0548-4ae8-8d4e-8f35172da2f9" containerName="registry-server"
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.812758 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="619f8d8f-0548-4ae8-8d4e-8f35172da2f9" containerName="registry-server"
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.813100 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf"
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.816040 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.816149 4941 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-hkt26"
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.816587 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.823504 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-lbhgf"]
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.920509 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-lbhgf\" (UID: \"78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf"
Nov 30 07:01:03 crc kubenswrapper[4941]: I1130 07:01:03.920813 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxff6\" (UniqueName: \"kubernetes.io/projected/78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a-kube-api-access-kxff6\") pod \"cert-manager-webhook-f4fb5df64-lbhgf\" (UID: \"78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf"
Nov 30 07:01:04 crc kubenswrapper[4941]: I1130 07:01:04.022243 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-lbhgf\" (UID: \"78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf"
Nov 30 07:01:04 crc kubenswrapper[4941]: I1130 07:01:04.022311 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxff6\" (UniqueName: \"kubernetes.io/projected/78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a-kube-api-access-kxff6\") pod \"cert-manager-webhook-f4fb5df64-lbhgf\" (UID: \"78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf"
Nov 30 07:01:04 crc kubenswrapper[4941]: I1130 07:01:04.042819 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-lbhgf\" (UID: \"78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf"
Nov 30 07:01:04 crc kubenswrapper[4941]: I1130 07:01:04.043226 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxff6\" (UniqueName: \"kubernetes.io/projected/78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a-kube-api-access-kxff6\") pod \"cert-manager-webhook-f4fb5df64-lbhgf\" (UID: \"78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf"
Nov 30 07:01:04 crc kubenswrapper[4941]: I1130 07:01:04.126613 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf"
Nov 30 07:01:04 crc kubenswrapper[4941]: I1130 07:01:04.443997 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-lbhgf"]
Nov 30 07:01:04 crc kubenswrapper[4941]: I1130 07:01:04.537241 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf" event={"ID":"78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a","Type":"ContainerStarted","Data":"e60ce6658af0f139150b1b35cf19ca05c3fecee4acdc9da3e73b1ab12781460c"}
Nov 30 07:01:07 crc kubenswrapper[4941]: I1130 07:01:07.936160 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rwnff"]
Nov 30 07:01:07 crc kubenswrapper[4941]: I1130 07:01:07.938711 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rwnff"
Nov 30 07:01:07 crc kubenswrapper[4941]: I1130 07:01:07.941936 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rwnff"]
Nov 30 07:01:08 crc kubenswrapper[4941]: I1130 07:01:08.080864 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/400719f0-9dcc-4932-8f56-099dbb450122-catalog-content\") pod \"certified-operators-rwnff\" (UID: \"400719f0-9dcc-4932-8f56-099dbb450122\") " pod="openshift-marketplace/certified-operators-rwnff"
Nov 30 07:01:08 crc kubenswrapper[4941]: I1130 07:01:08.080916 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/400719f0-9dcc-4932-8f56-099dbb450122-utilities\") pod \"certified-operators-rwnff\" (UID: \"400719f0-9dcc-4932-8f56-099dbb450122\") " pod="openshift-marketplace/certified-operators-rwnff"
Nov 30 07:01:08 crc kubenswrapper[4941]: I1130 07:01:08.080948 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7swx9\" (UniqueName: \"kubernetes.io/projected/400719f0-9dcc-4932-8f56-099dbb450122-kube-api-access-7swx9\") pod \"certified-operators-rwnff\" (UID: \"400719f0-9dcc-4932-8f56-099dbb450122\") " pod="openshift-marketplace/certified-operators-rwnff"
Nov 30 07:01:08 crc kubenswrapper[4941]: I1130 07:01:08.182849 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/400719f0-9dcc-4932-8f56-099dbb450122-catalog-content\") pod \"certified-operators-rwnff\" (UID: \"400719f0-9dcc-4932-8f56-099dbb450122\") " pod="openshift-marketplace/certified-operators-rwnff"
Nov 30 07:01:08 crc kubenswrapper[4941]: I1130 07:01:08.182891 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/400719f0-9dcc-4932-8f56-099dbb450122-utilities\") pod \"certified-operators-rwnff\" (UID: \"400719f0-9dcc-4932-8f56-099dbb450122\") " pod="openshift-marketplace/certified-operators-rwnff"
Nov 30 07:01:08 crc kubenswrapper[4941]: I1130 07:01:08.182912 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7swx9\" (UniqueName: \"kubernetes.io/projected/400719f0-9dcc-4932-8f56-099dbb450122-kube-api-access-7swx9\") pod \"certified-operators-rwnff\" (UID: \"400719f0-9dcc-4932-8f56-099dbb450122\") " pod="openshift-marketplace/certified-operators-rwnff"
Nov 30 07:01:08 crc kubenswrapper[4941]: I1130 07:01:08.183558 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/400719f0-9dcc-4932-8f56-099dbb450122-catalog-content\") pod \"certified-operators-rwnff\" (UID: \"400719f0-9dcc-4932-8f56-099dbb450122\") " pod="openshift-marketplace/certified-operators-rwnff"
Nov 30 07:01:08 crc kubenswrapper[4941]: I1130 07:01:08.183838 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/400719f0-9dcc-4932-8f56-099dbb450122-utilities\") pod \"certified-operators-rwnff\" (UID: \"400719f0-9dcc-4932-8f56-099dbb450122\") " pod="openshift-marketplace/certified-operators-rwnff"
Nov 30 07:01:08 crc kubenswrapper[4941]: I1130 07:01:08.202002 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7swx9\" (UniqueName: \"kubernetes.io/projected/400719f0-9dcc-4932-8f56-099dbb450122-kube-api-access-7swx9\") pod \"certified-operators-rwnff\" (UID: \"400719f0-9dcc-4932-8f56-099dbb450122\") " pod="openshift-marketplace/certified-operators-rwnff"
Nov 30 07:01:08 crc kubenswrapper[4941]: I1130 07:01:08.254555 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rwnff"
Nov 30 07:01:10 crc kubenswrapper[4941]: I1130 07:01:10.130582 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn"]
Nov 30 07:01:10 crc kubenswrapper[4941]: I1130 07:01:10.132825 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn"
Nov 30 07:01:10 crc kubenswrapper[4941]: I1130 07:01:10.134405 4941 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-4drq8"
Nov 30 07:01:10 crc kubenswrapper[4941]: I1130 07:01:10.144848 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn"]
Nov 30 07:01:10 crc kubenswrapper[4941]: I1130 07:01:10.212000 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8ts7\" (UniqueName: \"kubernetes.io/projected/c4fdd60d-955a-47b0-b721-f8d3517f866f-kube-api-access-t8ts7\") pod \"cert-manager-cainjector-855d9ccff4-2xsxn\" (UID: \"c4fdd60d-955a-47b0-b721-f8d3517f866f\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn"
Nov 30 07:01:10 crc kubenswrapper[4941]: I1130 07:01:10.212052 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c4fdd60d-955a-47b0-b721-f8d3517f866f-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-2xsxn\" (UID: \"c4fdd60d-955a-47b0-b721-f8d3517f866f\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn"
Nov 30 07:01:10 crc kubenswrapper[4941]: I1130 07:01:10.313348 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8ts7\" (UniqueName: \"kubernetes.io/projected/c4fdd60d-955a-47b0-b721-f8d3517f866f-kube-api-access-t8ts7\") pod \"cert-manager-cainjector-855d9ccff4-2xsxn\" (UID: \"c4fdd60d-955a-47b0-b721-f8d3517f866f\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn"
Nov 30 07:01:10 crc kubenswrapper[4941]: I1130 07:01:10.313398 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c4fdd60d-955a-47b0-b721-f8d3517f866f-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-2xsxn\" (UID: \"c4fdd60d-955a-47b0-b721-f8d3517f866f\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn"
Nov 30 07:01:10 crc kubenswrapper[4941]: I1130 07:01:10.329944 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c4fdd60d-955a-47b0-b721-f8d3517f866f-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-2xsxn\" (UID: \"c4fdd60d-955a-47b0-b721-f8d3517f866f\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn"
Nov 30 07:01:10 crc kubenswrapper[4941]: I1130 07:01:10.330478 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8ts7\" (UniqueName: \"kubernetes.io/projected/c4fdd60d-955a-47b0-b721-f8d3517f866f-kube-api-access-t8ts7\") pod \"cert-manager-cainjector-855d9ccff4-2xsxn\" (UID: \"c4fdd60d-955a-47b0-b721-f8d3517f866f\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn"
Nov 30 07:01:10 crc kubenswrapper[4941]: I1130 07:01:10.453287 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn"
Nov 30 07:01:14 crc kubenswrapper[4941]: I1130 07:01:14.037201 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rwnff"]
Nov 30 07:01:14 crc kubenswrapper[4941]: I1130 07:01:14.106987 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn"]
Nov 30 07:01:14 crc kubenswrapper[4941]: I1130 07:01:14.634532 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn" event={"ID":"c4fdd60d-955a-47b0-b721-f8d3517f866f","Type":"ContainerStarted","Data":"b6e8b9bf5a82b3626ad489320268ea486484069e462a51dcff4a9c63d2fb8225"}
Nov 30 07:01:14 crc kubenswrapper[4941]: I1130 07:01:14.635266 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn" event={"ID":"c4fdd60d-955a-47b0-b721-f8d3517f866f","Type":"ContainerStarted","Data":"aff80ba0a1c936d839982b75226c1cf54d01d58471177a3f6da3794d18c826fa"}
Nov 30 07:01:14 crc kubenswrapper[4941]: I1130 07:01:14.635690 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf" event={"ID":"78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a","Type":"ContainerStarted","Data":"63fb974d7041ecf0187605565f6d3739085dafb20d2b2a1b32ab98be015c1a35"}
Nov 30 07:01:14 crc kubenswrapper[4941]: I1130 07:01:14.635777 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf"
Nov 30 07:01:14 crc kubenswrapper[4941]: I1130 07:01:14.636730 4941 generic.go:334] "Generic (PLEG): container finished" podID="400719f0-9dcc-4932-8f56-099dbb450122" containerID="6386b5fabd5a63723c680ee0de504712b8131a72bbd0306eabf35288a6b3080f" exitCode=0
Nov 30 07:01:14 crc kubenswrapper[4941]: I1130 07:01:14.636766 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwnff" event={"ID":"400719f0-9dcc-4932-8f56-099dbb450122","Type":"ContainerDied","Data":"6386b5fabd5a63723c680ee0de504712b8131a72bbd0306eabf35288a6b3080f"}
Nov 30 07:01:14 crc kubenswrapper[4941]: I1130 07:01:14.636808 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwnff" event={"ID":"400719f0-9dcc-4932-8f56-099dbb450122","Type":"ContainerStarted","Data":"c18aecafd88d2363fdef211cdafac91b1bac55438c9d8adb959ee131c6dd6601"}
Nov 30 07:01:14 crc kubenswrapper[4941]: I1130 07:01:14.651920 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-2xsxn" podStartSLOduration=4.651895468 podStartE2EDuration="4.651895468s" podCreationTimestamp="2025-11-30 07:01:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:01:14.650586687 +0000 UTC m=+895.418758296" watchObservedRunningTime="2025-11-30 07:01:14.651895468 +0000 UTC m=+895.420067097"
Nov 30 07:01:14 crc kubenswrapper[4941]: I1130 07:01:14.663412 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf" podStartSLOduration=2.344903891 podStartE2EDuration="11.663389968s" podCreationTimestamp="2025-11-30 07:01:03 +0000 UTC" firstStartedPulling="2025-11-30 07:01:04.450978598 +0000 UTC m=+885.219150207" lastFinishedPulling="2025-11-30 07:01:13.769464675 +0000 UTC m=+894.537636284" observedRunningTime="2025-11-30 07:01:14.662710898 +0000 UTC m=+895.430882517" watchObservedRunningTime="2025-11-30 07:01:14.663389968 +0000 UTC m=+895.431561597"
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf" podStartSLOduration=2.344903891 podStartE2EDuration="11.663389968s" podCreationTimestamp="2025-11-30 07:01:03 +0000 UTC" firstStartedPulling="2025-11-30 07:01:04.450978598 +0000 UTC m=+885.219150207" lastFinishedPulling="2025-11-30 07:01:13.769464675 +0000 UTC m=+894.537636284" observedRunningTime="2025-11-30 07:01:14.662710898 +0000 UTC m=+895.430882517" watchObservedRunningTime="2025-11-30 07:01:14.663389968 +0000 UTC m=+895.431561597" Nov 30 07:01:15 crc kubenswrapper[4941]: I1130 07:01:15.646620 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwnff" event={"ID":"400719f0-9dcc-4932-8f56-099dbb450122","Type":"ContainerStarted","Data":"66ae34e9d30537e89cbd8c32064372986c802367dc27d507be8d1b1102639917"} Nov 30 07:01:16 crc kubenswrapper[4941]: I1130 07:01:16.652787 4941 generic.go:334] "Generic (PLEG): container finished" podID="400719f0-9dcc-4932-8f56-099dbb450122" containerID="66ae34e9d30537e89cbd8c32064372986c802367dc27d507be8d1b1102639917" exitCode=0 Nov 30 07:01:16 crc kubenswrapper[4941]: I1130 07:01:16.652826 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwnff" event={"ID":"400719f0-9dcc-4932-8f56-099dbb450122","Type":"ContainerDied","Data":"66ae34e9d30537e89cbd8c32064372986c802367dc27d507be8d1b1102639917"} Nov 30 07:01:17 crc kubenswrapper[4941]: I1130 07:01:17.659586 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwnff" event={"ID":"400719f0-9dcc-4932-8f56-099dbb450122","Type":"ContainerStarted","Data":"b1e7fd88360a2580f91b4523a1f758f3511a715b3ab4c44477385eb01c2645cc"} Nov 30 07:01:17 crc kubenswrapper[4941]: I1130 07:01:17.679818 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rwnff" podStartSLOduration=8.101595901 podStartE2EDuration="10.679803409s" podCreationTimestamp="2025-11-30 07:01:07 +0000 UTC" firstStartedPulling="2025-11-30 07:01:14.637679272 +0000 UTC m=+895.405850881" lastFinishedPulling="2025-11-30 07:01:17.21588678 +0000 UTC m=+897.984058389" observedRunningTime="2025-11-30 07:01:17.676858996 +0000 UTC m=+898.445030615" watchObservedRunningTime="2025-11-30 07:01:17.679803409 +0000 UTC m=+898.447975018" Nov 30 07:01:18 crc kubenswrapper[4941]: I1130 07:01:18.256142 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rwnff" Nov 30 07:01:18 crc kubenswrapper[4941]: I1130 07:01:18.256311 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rwnff" Nov 30 07:01:19 crc kubenswrapper[4941]: I1130 07:01:19.130947 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-lbhgf" Nov 30 07:01:19 crc kubenswrapper[4941]: I1130 07:01:19.317082 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-rwnff" podUID="400719f0-9dcc-4932-8f56-099dbb450122" containerName="registry-server" probeResult="failure" output=< Nov 30 07:01:19 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s Nov 30 07:01:19 crc kubenswrapper[4941]: > Nov 30 07:01:22 crc kubenswrapper[4941]: I1130 07:01:22.690715 4941 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["cert-manager/cert-manager-86cb77c54b-9h7bf"] Nov 30 07:01:22 crc kubenswrapper[4941]: I1130 07:01:22.693039 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-9h7bf" Nov 30 07:01:22 crc kubenswrapper[4941]: I1130 07:01:22.694989 4941 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-wclnm" Nov 30 07:01:22 crc kubenswrapper[4941]: I1130 07:01:22.706869 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-9h7bf"] Nov 30 07:01:22 crc kubenswrapper[4941]: I1130 07:01:22.791512 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a624fc70-8bbc-4706-b2d8-221ec51c3f2c-bound-sa-token\") pod \"cert-manager-86cb77c54b-9h7bf\" (UID: \"a624fc70-8bbc-4706-b2d8-221ec51c3f2c\") " pod="cert-manager/cert-manager-86cb77c54b-9h7bf" Nov 30 07:01:22 crc kubenswrapper[4941]: I1130 07:01:22.792641 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdnfq\" (UniqueName: \"kubernetes.io/projected/a624fc70-8bbc-4706-b2d8-221ec51c3f2c-kube-api-access-kdnfq\") pod \"cert-manager-86cb77c54b-9h7bf\" (UID: \"a624fc70-8bbc-4706-b2d8-221ec51c3f2c\") " pod="cert-manager/cert-manager-86cb77c54b-9h7bf" Nov 30 07:01:22 crc kubenswrapper[4941]: I1130 07:01:22.896289 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdnfq\" (UniqueName: \"kubernetes.io/projected/a624fc70-8bbc-4706-b2d8-221ec51c3f2c-kube-api-access-kdnfq\") pod \"cert-manager-86cb77c54b-9h7bf\" (UID: \"a624fc70-8bbc-4706-b2d8-221ec51c3f2c\") " pod="cert-manager/cert-manager-86cb77c54b-9h7bf" Nov 30 07:01:22 crc kubenswrapper[4941]: I1130 07:01:22.896491 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a624fc70-8bbc-4706-b2d8-221ec51c3f2c-bound-sa-token\") pod \"cert-manager-86cb77c54b-9h7bf\" (UID: \"a624fc70-8bbc-4706-b2d8-221ec51c3f2c\") " pod="cert-manager/cert-manager-86cb77c54b-9h7bf" Nov 30 07:01:22 crc kubenswrapper[4941]: I1130 07:01:22.925252 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdnfq\" (UniqueName: \"kubernetes.io/projected/a624fc70-8bbc-4706-b2d8-221ec51c3f2c-kube-api-access-kdnfq\") pod \"cert-manager-86cb77c54b-9h7bf\" (UID: \"a624fc70-8bbc-4706-b2d8-221ec51c3f2c\") " pod="cert-manager/cert-manager-86cb77c54b-9h7bf" Nov 30 07:01:22 crc kubenswrapper[4941]: I1130 07:01:22.926315 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a624fc70-8bbc-4706-b2d8-221ec51c3f2c-bound-sa-token\") pod \"cert-manager-86cb77c54b-9h7bf\" (UID: \"a624fc70-8bbc-4706-b2d8-221ec51c3f2c\") " pod="cert-manager/cert-manager-86cb77c54b-9h7bf" Nov 30 07:01:23 crc kubenswrapper[4941]: I1130 07:01:23.013457 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-9h7bf" Nov 30 07:01:23 crc kubenswrapper[4941]: I1130 07:01:23.257702 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-9h7bf"] Nov 30 07:01:23 crc kubenswrapper[4941]: W1130 07:01:23.261934 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda624fc70_8bbc_4706_b2d8_221ec51c3f2c.slice/crio-860e35ed766ccbc1f7a67803d6005b55240d8e0cd2fdb70cf00b850a66725ea7 WatchSource:0}: Error finding container 860e35ed766ccbc1f7a67803d6005b55240d8e0cd2fdb70cf00b850a66725ea7: Status 404 returned error can't find the container with id 860e35ed766ccbc1f7a67803d6005b55240d8e0cd2fdb70cf00b850a66725ea7 Nov 30 07:01:23 crc kubenswrapper[4941]: I1130 07:01:23.699477 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-9h7bf" event={"ID":"a624fc70-8bbc-4706-b2d8-221ec51c3f2c","Type":"ContainerStarted","Data":"860e35ed766ccbc1f7a67803d6005b55240d8e0cd2fdb70cf00b850a66725ea7"} Nov 30 07:01:24 crc kubenswrapper[4941]: I1130 07:01:24.706247 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-9h7bf" event={"ID":"a624fc70-8bbc-4706-b2d8-221ec51c3f2c","Type":"ContainerStarted","Data":"10a024e3570334aae7a1f2978399527e3f4fa6b36731b1472c30df1932bd851e"} Nov 30 07:01:24 crc kubenswrapper[4941]: I1130 07:01:24.719822 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-9h7bf" podStartSLOduration=2.719807958 podStartE2EDuration="2.719807958s" podCreationTimestamp="2025-11-30 07:01:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:01:24.717766094 +0000 UTC m=+905.485937703" watchObservedRunningTime="2025-11-30 07:01:24.719807958 +0000 UTC m=+905.487979567" Nov 30 07:01:28 crc kubenswrapper[4941]: I1130 07:01:28.303061 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rwnff" Nov 30 07:01:28 crc kubenswrapper[4941]: I1130 07:01:28.348882 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rwnff" Nov 30 07:01:28 crc kubenswrapper[4941]: I1130 07:01:28.537070 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rwnff"] Nov 30 07:01:29 crc kubenswrapper[4941]: I1130 07:01:29.742938 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rwnff" podUID="400719f0-9dcc-4932-8f56-099dbb450122" containerName="registry-server" containerID="cri-o://b1e7fd88360a2580f91b4523a1f758f3511a715b3ab4c44477385eb01c2645cc" gracePeriod=2 Nov 30 07:01:30 crc kubenswrapper[4941]: I1130 07:01:30.753498 4941 generic.go:334] "Generic (PLEG): container finished" podID="400719f0-9dcc-4932-8f56-099dbb450122" containerID="b1e7fd88360a2580f91b4523a1f758f3511a715b3ab4c44477385eb01c2645cc" exitCode=0 Nov 30 07:01:30 crc kubenswrapper[4941]: I1130 07:01:30.753551 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwnff" event={"ID":"400719f0-9dcc-4932-8f56-099dbb450122","Type":"ContainerDied","Data":"b1e7fd88360a2580f91b4523a1f758f3511a715b3ab4c44477385eb01c2645cc"} Nov 30 07:01:30 crc kubenswrapper[4941]: I1130 
07:01:30.820284 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rwnff" Nov 30 07:01:30 crc kubenswrapper[4941]: I1130 07:01:30.909183 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/400719f0-9dcc-4932-8f56-099dbb450122-catalog-content\") pod \"400719f0-9dcc-4932-8f56-099dbb450122\" (UID: \"400719f0-9dcc-4932-8f56-099dbb450122\") " Nov 30 07:01:30 crc kubenswrapper[4941]: I1130 07:01:30.909284 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/400719f0-9dcc-4932-8f56-099dbb450122-utilities\") pod \"400719f0-9dcc-4932-8f56-099dbb450122\" (UID: \"400719f0-9dcc-4932-8f56-099dbb450122\") " Nov 30 07:01:30 crc kubenswrapper[4941]: I1130 07:01:30.909448 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7swx9\" (UniqueName: \"kubernetes.io/projected/400719f0-9dcc-4932-8f56-099dbb450122-kube-api-access-7swx9\") pod \"400719f0-9dcc-4932-8f56-099dbb450122\" (UID: \"400719f0-9dcc-4932-8f56-099dbb450122\") " Nov 30 07:01:30 crc kubenswrapper[4941]: I1130 07:01:30.910087 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/400719f0-9dcc-4932-8f56-099dbb450122-utilities" (OuterVolumeSpecName: "utilities") pod "400719f0-9dcc-4932-8f56-099dbb450122" (UID: "400719f0-9dcc-4932-8f56-099dbb450122"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:01:30 crc kubenswrapper[4941]: I1130 07:01:30.915175 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/400719f0-9dcc-4932-8f56-099dbb450122-kube-api-access-7swx9" (OuterVolumeSpecName: "kube-api-access-7swx9") pod "400719f0-9dcc-4932-8f56-099dbb450122" (UID: "400719f0-9dcc-4932-8f56-099dbb450122"). InnerVolumeSpecName "kube-api-access-7swx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:01:30 crc kubenswrapper[4941]: I1130 07:01:30.964372 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/400719f0-9dcc-4932-8f56-099dbb450122-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "400719f0-9dcc-4932-8f56-099dbb450122" (UID: "400719f0-9dcc-4932-8f56-099dbb450122"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:01:31 crc kubenswrapper[4941]: I1130 07:01:31.011166 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7swx9\" (UniqueName: \"kubernetes.io/projected/400719f0-9dcc-4932-8f56-099dbb450122-kube-api-access-7swx9\") on node \"crc\" DevicePath \"\"" Nov 30 07:01:31 crc kubenswrapper[4941]: I1130 07:01:31.011522 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/400719f0-9dcc-4932-8f56-099dbb450122-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:01:31 crc kubenswrapper[4941]: I1130 07:01:31.011611 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/400719f0-9dcc-4932-8f56-099dbb450122-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:01:31 crc kubenswrapper[4941]: I1130 07:01:31.762953 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rwnff" event={"ID":"400719f0-9dcc-4932-8f56-099dbb450122","Type":"ContainerDied","Data":"c18aecafd88d2363fdef211cdafac91b1bac55438c9d8adb959ee131c6dd6601"} Nov 30 07:01:31 crc kubenswrapper[4941]: I1130 07:01:31.763002 4941 scope.go:117] "RemoveContainer" containerID="b1e7fd88360a2580f91b4523a1f758f3511a715b3ab4c44477385eb01c2645cc" Nov 30 07:01:31 crc kubenswrapper[4941]: I1130 07:01:31.763079 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rwnff" Nov 30 07:01:31 crc kubenswrapper[4941]: I1130 07:01:31.780472 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rwnff"] Nov 30 07:01:31 crc kubenswrapper[4941]: I1130 07:01:31.785787 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rwnff"] Nov 30 07:01:31 crc kubenswrapper[4941]: I1130 07:01:31.788614 4941 scope.go:117] "RemoveContainer" containerID="66ae34e9d30537e89cbd8c32064372986c802367dc27d507be8d1b1102639917" Nov 30 07:01:31 crc kubenswrapper[4941]: I1130 07:01:31.804215 4941 scope.go:117] "RemoveContainer" containerID="6386b5fabd5a63723c680ee0de504712b8131a72bbd0306eabf35288a6b3080f" Nov 30 07:01:33 crc kubenswrapper[4941]: I1130 07:01:33.531964 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="400719f0-9dcc-4932-8f56-099dbb450122" path="/var/lib/kubelet/pods/400719f0-9dcc-4932-8f56-099dbb450122/volumes" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.349405 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-dfm6s"] Nov 30 07:01:35 crc kubenswrapper[4941]: E1130 07:01:35.350018 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="400719f0-9dcc-4932-8f56-099dbb450122" containerName="extract-content" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.350029 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="400719f0-9dcc-4932-8f56-099dbb450122" containerName="extract-content" Nov 30 07:01:35 crc kubenswrapper[4941]: E1130 07:01:35.350041 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="400719f0-9dcc-4932-8f56-099dbb450122" containerName="extract-utilities" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.350047 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="400719f0-9dcc-4932-8f56-099dbb450122" containerName="extract-utilities" Nov 30 07:01:35 crc kubenswrapper[4941]: E1130 07:01:35.350061 
4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="400719f0-9dcc-4932-8f56-099dbb450122" containerName="registry-server" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.350067 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="400719f0-9dcc-4932-8f56-099dbb450122" containerName="registry-server" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.350181 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="400719f0-9dcc-4932-8f56-099dbb450122" containerName="registry-server" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.350621 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-dfm6s" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.352402 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-svf9w" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.352497 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.353524 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.361458 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-dfm6s"] Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.474472 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fpbm\" (UniqueName: \"kubernetes.io/projected/153a8f3b-e179-4d4c-9d5b-14bc3efdc8ea-kube-api-access-5fpbm\") pod \"openstack-operator-index-dfm6s\" (UID: \"153a8f3b-e179-4d4c-9d5b-14bc3efdc8ea\") " pod="openstack-operators/openstack-operator-index-dfm6s" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.575668 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fpbm\" (UniqueName: \"kubernetes.io/projected/153a8f3b-e179-4d4c-9d5b-14bc3efdc8ea-kube-api-access-5fpbm\") pod \"openstack-operator-index-dfm6s\" (UID: \"153a8f3b-e179-4d4c-9d5b-14bc3efdc8ea\") " pod="openstack-operators/openstack-operator-index-dfm6s" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.600739 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fpbm\" (UniqueName: \"kubernetes.io/projected/153a8f3b-e179-4d4c-9d5b-14bc3efdc8ea-kube-api-access-5fpbm\") pod \"openstack-operator-index-dfm6s\" (UID: \"153a8f3b-e179-4d4c-9d5b-14bc3efdc8ea\") " pod="openstack-operators/openstack-operator-index-dfm6s" Nov 30 07:01:35 crc kubenswrapper[4941]: I1130 07:01:35.679466 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-dfm6s" Nov 30 07:01:36 crc kubenswrapper[4941]: I1130 07:01:36.193575 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-dfm6s"] Nov 30 07:01:36 crc kubenswrapper[4941]: I1130 07:01:36.810171 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-dfm6s" event={"ID":"153a8f3b-e179-4d4c-9d5b-14bc3efdc8ea","Type":"ContainerStarted","Data":"6c7375fa35006465e2d558a73a6872148566debb7f9fde34c09474f1b2943d0d"} Nov 30 07:01:37 crc kubenswrapper[4941]: I1130 07:01:37.819307 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-dfm6s" event={"ID":"153a8f3b-e179-4d4c-9d5b-14bc3efdc8ea","Type":"ContainerStarted","Data":"eb72cc311ebc8ae144f2052c0046fe69b8641758148d69579211200bc7998194"} Nov 30 07:01:37 crc kubenswrapper[4941]: I1130 07:01:37.844522 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-dfm6s" podStartSLOduration=2.01468678 podStartE2EDuration="2.844496439s" podCreationTimestamp="2025-11-30 07:01:35 +0000 UTC" firstStartedPulling="2025-11-30 07:01:36.205411995 +0000 UTC m=+916.973583604" lastFinishedPulling="2025-11-30 07:01:37.035221644 +0000 UTC m=+917.803393263" observedRunningTime="2025-11-30 07:01:37.835845568 +0000 UTC m=+918.604017217" watchObservedRunningTime="2025-11-30 07:01:37.844496439 +0000 UTC m=+918.612668078" Nov 30 07:01:45 crc kubenswrapper[4941]: I1130 07:01:45.680106 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-dfm6s" Nov 30 07:01:45 crc kubenswrapper[4941]: I1130 07:01:45.680833 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-dfm6s" Nov 30 07:01:45 crc kubenswrapper[4941]: I1130 07:01:45.710774 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-dfm6s" Nov 30 07:01:45 crc kubenswrapper[4941]: I1130 07:01:45.909033 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-dfm6s" Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.394791 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k"] Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.397399 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.401825 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-mmtwb" Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.410872 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k"] Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.468503 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5edf826b-ce6f-4810-b400-743a0f02d05e-bundle\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k\" (UID: \"5edf826b-ce6f-4810-b400-743a0f02d05e\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.468603 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lspd5\" (UniqueName: \"kubernetes.io/projected/5edf826b-ce6f-4810-b400-743a0f02d05e-kube-api-access-lspd5\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k\" (UID: \"5edf826b-ce6f-4810-b400-743a0f02d05e\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.468656 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5edf826b-ce6f-4810-b400-743a0f02d05e-util\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k\" (UID: \"5edf826b-ce6f-4810-b400-743a0f02d05e\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.569722 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5edf826b-ce6f-4810-b400-743a0f02d05e-bundle\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k\" (UID: \"5edf826b-ce6f-4810-b400-743a0f02d05e\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.569788 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lspd5\" (UniqueName: \"kubernetes.io/projected/5edf826b-ce6f-4810-b400-743a0f02d05e-kube-api-access-lspd5\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k\" (UID: \"5edf826b-ce6f-4810-b400-743a0f02d05e\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.569821 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5edf826b-ce6f-4810-b400-743a0f02d05e-util\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k\" (UID: \"5edf826b-ce6f-4810-b400-743a0f02d05e\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.570235 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/5edf826b-ce6f-4810-b400-743a0f02d05e-bundle\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k\" (UID: \"5edf826b-ce6f-4810-b400-743a0f02d05e\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.570297 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5edf826b-ce6f-4810-b400-743a0f02d05e-util\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k\" (UID: \"5edf826b-ce6f-4810-b400-743a0f02d05e\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.610084 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lspd5\" (UniqueName: \"kubernetes.io/projected/5edf826b-ce6f-4810-b400-743a0f02d05e-kube-api-access-lspd5\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k\" (UID: \"5edf826b-ce6f-4810-b400-743a0f02d05e\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:48 crc kubenswrapper[4941]: I1130 07:01:48.721257 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:49 crc kubenswrapper[4941]: I1130 07:01:49.118543 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k"] Nov 30 07:01:49 crc kubenswrapper[4941]: I1130 07:01:49.898922 4941 generic.go:334] "Generic (PLEG): container finished" podID="5edf826b-ce6f-4810-b400-743a0f02d05e" containerID="2594bd452958292ed51e556b4306e7bc1a57a3501b44c18e6338231e6a282a3b" exitCode=0 Nov 30 07:01:49 crc kubenswrapper[4941]: I1130 07:01:49.899011 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" event={"ID":"5edf826b-ce6f-4810-b400-743a0f02d05e","Type":"ContainerDied","Data":"2594bd452958292ed51e556b4306e7bc1a57a3501b44c18e6338231e6a282a3b"} Nov 30 07:01:49 crc kubenswrapper[4941]: I1130 07:01:49.899163 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" event={"ID":"5edf826b-ce6f-4810-b400-743a0f02d05e","Type":"ContainerStarted","Data":"ac435d98968efa467b46358748a332dcde66c8b3ad74e3cc86c8bfb070987560"} Nov 30 07:01:50 crc kubenswrapper[4941]: I1130 07:01:50.906665 4941 generic.go:334] "Generic (PLEG): container finished" podID="5edf826b-ce6f-4810-b400-743a0f02d05e" containerID="72212f17289263d4cc27f006202a87e450b4ec78450915ae958e4c3d97161a60" exitCode=0 Nov 30 07:01:50 crc kubenswrapper[4941]: I1130 07:01:50.906732 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" event={"ID":"5edf826b-ce6f-4810-b400-743a0f02d05e","Type":"ContainerDied","Data":"72212f17289263d4cc27f006202a87e450b4ec78450915ae958e4c3d97161a60"} Nov 30 07:01:51 crc kubenswrapper[4941]: I1130 07:01:51.916051 4941 generic.go:334] "Generic (PLEG): container finished" podID="5edf826b-ce6f-4810-b400-743a0f02d05e" containerID="d61f1caa90855ac09dadc29192e79b946c505b8a36aa4a3a2a8ffe7ff800e13b" exitCode=0 Nov 30 07:01:51 crc kubenswrapper[4941]: I1130 07:01:51.916113 4941 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" event={"ID":"5edf826b-ce6f-4810-b400-743a0f02d05e","Type":"ContainerDied","Data":"d61f1caa90855ac09dadc29192e79b946c505b8a36aa4a3a2a8ffe7ff800e13b"} Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.246476 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.339857 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5edf826b-ce6f-4810-b400-743a0f02d05e-bundle\") pod \"5edf826b-ce6f-4810-b400-743a0f02d05e\" (UID: \"5edf826b-ce6f-4810-b400-743a0f02d05e\") " Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.339914 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lspd5\" (UniqueName: \"kubernetes.io/projected/5edf826b-ce6f-4810-b400-743a0f02d05e-kube-api-access-lspd5\") pod \"5edf826b-ce6f-4810-b400-743a0f02d05e\" (UID: \"5edf826b-ce6f-4810-b400-743a0f02d05e\") " Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.339947 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5edf826b-ce6f-4810-b400-743a0f02d05e-util\") pod \"5edf826b-ce6f-4810-b400-743a0f02d05e\" (UID: \"5edf826b-ce6f-4810-b400-743a0f02d05e\") " Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.340862 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5edf826b-ce6f-4810-b400-743a0f02d05e-bundle" (OuterVolumeSpecName: "bundle") pod "5edf826b-ce6f-4810-b400-743a0f02d05e" (UID: "5edf826b-ce6f-4810-b400-743a0f02d05e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.347451 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5edf826b-ce6f-4810-b400-743a0f02d05e-kube-api-access-lspd5" (OuterVolumeSpecName: "kube-api-access-lspd5") pod "5edf826b-ce6f-4810-b400-743a0f02d05e" (UID: "5edf826b-ce6f-4810-b400-743a0f02d05e"). InnerVolumeSpecName "kube-api-access-lspd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.360890 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5edf826b-ce6f-4810-b400-743a0f02d05e-util" (OuterVolumeSpecName: "util") pod "5edf826b-ce6f-4810-b400-743a0f02d05e" (UID: "5edf826b-ce6f-4810-b400-743a0f02d05e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.441955 4941 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5edf826b-ce6f-4810-b400-743a0f02d05e-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.441993 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lspd5\" (UniqueName: \"kubernetes.io/projected/5edf826b-ce6f-4810-b400-743a0f02d05e-kube-api-access-lspd5\") on node \"crc\" DevicePath \"\"" Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.442008 4941 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5edf826b-ce6f-4810-b400-743a0f02d05e-util\") on node \"crc\" DevicePath \"\"" Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.933242 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" event={"ID":"5edf826b-ce6f-4810-b400-743a0f02d05e","Type":"ContainerDied","Data":"ac435d98968efa467b46358748a332dcde66c8b3ad74e3cc86c8bfb070987560"} Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.933293 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k" Nov 30 07:01:53 crc kubenswrapper[4941]: I1130 07:01:53.933294 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac435d98968efa467b46358748a332dcde66c8b3ad74e3cc86c8bfb070987560" Nov 30 07:02:00 crc kubenswrapper[4941]: I1130 07:02:00.570743 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n"] Nov 30 07:02:00 crc kubenswrapper[4941]: E1130 07:02:00.571776 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5edf826b-ce6f-4810-b400-743a0f02d05e" containerName="pull" Nov 30 07:02:00 crc kubenswrapper[4941]: I1130 07:02:00.571797 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="5edf826b-ce6f-4810-b400-743a0f02d05e" containerName="pull" Nov 30 07:02:00 crc kubenswrapper[4941]: E1130 07:02:00.571873 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5edf826b-ce6f-4810-b400-743a0f02d05e" containerName="util" Nov 30 07:02:00 crc kubenswrapper[4941]: I1130 07:02:00.571889 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="5edf826b-ce6f-4810-b400-743a0f02d05e" containerName="util" Nov 30 07:02:00 crc kubenswrapper[4941]: E1130 07:02:00.571909 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5edf826b-ce6f-4810-b400-743a0f02d05e" containerName="extract" Nov 30 07:02:00 crc kubenswrapper[4941]: I1130 07:02:00.571923 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="5edf826b-ce6f-4810-b400-743a0f02d05e" containerName="extract" Nov 30 07:02:00 crc kubenswrapper[4941]: I1130 07:02:00.572117 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="5edf826b-ce6f-4810-b400-743a0f02d05e" containerName="extract" Nov 30 07:02:00 crc kubenswrapper[4941]: I1130 07:02:00.572842 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n" Nov 30 07:02:00 crc kubenswrapper[4941]: I1130 07:02:00.577251 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-x4m7t" Nov 30 07:02:00 crc kubenswrapper[4941]: I1130 07:02:00.593742 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n"] Nov 30 07:02:00 crc kubenswrapper[4941]: I1130 07:02:00.749604 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sh7fq\" (UniqueName: \"kubernetes.io/projected/a59f8b8f-0b1c-4ba5-8796-7c87e1ce6940-kube-api-access-sh7fq\") pod \"openstack-operator-controller-operator-6ddddd9d6f-zbv6n\" (UID: \"a59f8b8f-0b1c-4ba5-8796-7c87e1ce6940\") " pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n" Nov 30 07:02:00 crc kubenswrapper[4941]: I1130 07:02:00.850807 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sh7fq\" (UniqueName: \"kubernetes.io/projected/a59f8b8f-0b1c-4ba5-8796-7c87e1ce6940-kube-api-access-sh7fq\") pod \"openstack-operator-controller-operator-6ddddd9d6f-zbv6n\" (UID: \"a59f8b8f-0b1c-4ba5-8796-7c87e1ce6940\") " pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n" Nov 30 07:02:00 crc kubenswrapper[4941]: I1130 07:02:00.884133 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sh7fq\" (UniqueName: \"kubernetes.io/projected/a59f8b8f-0b1c-4ba5-8796-7c87e1ce6940-kube-api-access-sh7fq\") pod \"openstack-operator-controller-operator-6ddddd9d6f-zbv6n\" (UID: \"a59f8b8f-0b1c-4ba5-8796-7c87e1ce6940\") " pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n" Nov 30 07:02:00 crc kubenswrapper[4941]: I1130 07:02:00.888598 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n" Nov 30 07:02:01 crc kubenswrapper[4941]: I1130 07:02:01.332625 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n"] Nov 30 07:02:02 crc kubenswrapper[4941]: I1130 07:02:01.997634 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n" event={"ID":"a59f8b8f-0b1c-4ba5-8796-7c87e1ce6940","Type":"ContainerStarted","Data":"6020c6c8fae8ee59c6e05814fbfff312e99a5547c2b6df9a42f45bb5a0a853c1"} Nov 30 07:02:06 crc kubenswrapper[4941]: I1130 07:02:06.023497 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n" event={"ID":"a59f8b8f-0b1c-4ba5-8796-7c87e1ce6940","Type":"ContainerStarted","Data":"512f1863878107992bb1873c89a74bfce14b06853d0cd2b931c15d583c59c4bb"} Nov 30 07:02:06 crc kubenswrapper[4941]: I1130 07:02:06.026468 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n" Nov 30 07:02:06 crc kubenswrapper[4941]: I1130 07:02:06.068113 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n" podStartSLOduration=1.9727114239999999 podStartE2EDuration="6.06809081s" podCreationTimestamp="2025-11-30 07:02:00 +0000 UTC" firstStartedPulling="2025-11-30 07:02:01.340674956 +0000 UTC m=+942.108846555" lastFinishedPulling="2025-11-30 07:02:05.436054342 +0000 UTC m=+946.204225941" observedRunningTime="2025-11-30 07:02:06.065002633 +0000 UTC m=+946.833174282" watchObservedRunningTime="2025-11-30 07:02:06.06809081 +0000 UTC m=+946.836262429" Nov 30 07:02:10 crc kubenswrapper[4941]: I1130 07:02:10.891567 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-zbv6n" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.855737 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn"] Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.857671 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.861369 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-drlv5" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.862961 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r"] Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.864343 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.865949 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-rgjst" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.888991 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r"] Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.894998 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52"] Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.895956 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.897872 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-m7ntn" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.915372 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n"] Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.916379 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.918857 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-bxmj8" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.935669 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw"] Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.937181 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.944703 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-4b92m" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.945849 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg"] Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.947194 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.958632 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-hxzp8" Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.961650 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52"] Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.970342 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n"] Nov 30 07:02:29 crc kubenswrapper[4941]: I1130 07:02:29.980446 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn"] Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.000390 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw"] Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.007303 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg"] Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.013647 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l"] Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.014689 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.015965 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmmsp\" (UniqueName: \"kubernetes.io/projected/9389b152-75d5-41ce-8638-f531ad93710d-kube-api-access-gmmsp\") pod \"designate-operator-controller-manager-78b4bc895b-vk24n\" (UID: \"9389b152-75d5-41ce-8638-f531ad93710d\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.016001 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2xl6\" (UniqueName: \"kubernetes.io/projected/e324349c-14cd-4043-9278-783e8faa883e-kube-api-access-x2xl6\") pod \"cinder-operator-controller-manager-859b6ccc6-59m9r\" (UID: \"e324349c-14cd-4043-9278-783e8faa883e\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.016033 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7hfp\" (UniqueName: \"kubernetes.io/projected/a5e74e68-5dfa-41e0-9301-09f3e59450fe-kube-api-access-g7hfp\") pod \"heat-operator-controller-manager-5f64f6f8bb-2vldw\" (UID: \"a5e74e68-5dfa-41e0-9301-09f3e59450fe\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.016054 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js4r6\" (UniqueName: \"kubernetes.io/projected/689afda6-cba6-4975-be84-09c20304ae05-kube-api-access-js4r6\") pod \"barbican-operator-controller-manager-7d9dfd778-7gqrn\" (UID: \"689afda6-cba6-4975-be84-09c20304ae05\") " 
pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.016074 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff4l4\" (UniqueName: \"kubernetes.io/projected/d1af403d-ad9d-464b-b9f8-60a57868b8fb-kube-api-access-ff4l4\") pod \"glance-operator-controller-manager-668d9c48b9-kwm52\" (UID: \"d1af403d-ad9d-464b-b9f8-60a57868b8fb\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.019262 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.019307 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-dp6kf" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.034392 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt"] Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.035472 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.038666 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-45th2" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.051776 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l"] Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.063667 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt"] Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.083386 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2"] Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.084584 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.089427 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2"] Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.089808 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-6d85q" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.098060 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf"] Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.099123 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.101295 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-bhb6h" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.117763 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbb8g\" (UniqueName: \"kubernetes.io/projected/5ac344b8-6b4f-45a7-afbd-666188065ed6-kube-api-access-kbb8g\") pod \"horizon-operator-controller-manager-68c6d99b8f-crskg\" (UID: \"5ac344b8-6b4f-45a7-afbd-666188065ed6\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.117807 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7hfp\" (UniqueName: \"kubernetes.io/projected/a5e74e68-5dfa-41e0-9301-09f3e59450fe-kube-api-access-g7hfp\") pod \"heat-operator-controller-manager-5f64f6f8bb-2vldw\" (UID: \"a5e74e68-5dfa-41e0-9301-09f3e59450fe\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.117835 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js4r6\" (UniqueName: \"kubernetes.io/projected/689afda6-cba6-4975-be84-09c20304ae05-kube-api-access-js4r6\") pod \"barbican-operator-controller-manager-7d9dfd778-7gqrn\" (UID: \"689afda6-cba6-4975-be84-09c20304ae05\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.117856 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert\") pod \"infra-operator-controller-manager-57548d458d-5vh4l\" (UID: \"3e37a585-c770-4472-bf53-4be22b98550a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.117876 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff4l4\" (UniqueName: \"kubernetes.io/projected/d1af403d-ad9d-464b-b9f8-60a57868b8fb-kube-api-access-ff4l4\") pod \"glance-operator-controller-manager-668d9c48b9-kwm52\" (UID: \"d1af403d-ad9d-464b-b9f8-60a57868b8fb\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.117922 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpznp\" (UniqueName: \"kubernetes.io/projected/3e37a585-c770-4472-bf53-4be22b98550a-kube-api-access-zpznp\") pod \"infra-operator-controller-manager-57548d458d-5vh4l\" (UID: \"3e37a585-c770-4472-bf53-4be22b98550a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.117955 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmmsp\" (UniqueName: \"kubernetes.io/projected/9389b152-75d5-41ce-8638-f531ad93710d-kube-api-access-gmmsp\") pod \"designate-operator-controller-manager-78b4bc895b-vk24n\" (UID: \"9389b152-75d5-41ce-8638-f531ad93710d\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n" Nov 30 
07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.117977 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgr5m\" (UniqueName: \"kubernetes.io/projected/af1b065d-e876-4fd1-b63e-6c5015b7c169-kube-api-access-hgr5m\") pod \"ironic-operator-controller-manager-6c548fd776-fg9bt\" (UID: \"af1b065d-e876-4fd1-b63e-6c5015b7c169\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.118003 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2xl6\" (UniqueName: \"kubernetes.io/projected/e324349c-14cd-4043-9278-783e8faa883e-kube-api-access-x2xl6\") pod \"cinder-operator-controller-manager-859b6ccc6-59m9r\" (UID: \"e324349c-14cd-4043-9278-783e8faa883e\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.119794 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.127164 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.130642 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-mk99t"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.171886 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.172886 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js4r6\" (UniqueName: \"kubernetes.io/projected/689afda6-cba6-4975-be84-09c20304ae05-kube-api-access-js4r6\") pod \"barbican-operator-controller-manager-7d9dfd778-7gqrn\" (UID: \"689afda6-cba6-4975-be84-09c20304ae05\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.199173 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7hfp\" (UniqueName: \"kubernetes.io/projected/a5e74e68-5dfa-41e0-9301-09f3e59450fe-kube-api-access-g7hfp\") pod \"heat-operator-controller-manager-5f64f6f8bb-2vldw\" (UID: \"a5e74e68-5dfa-41e0-9301-09f3e59450fe\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.194563 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.208958 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2xl6\" (UniqueName: \"kubernetes.io/projected/e324349c-14cd-4043-9278-783e8faa883e-kube-api-access-x2xl6\") pod \"cinder-operator-controller-manager-859b6ccc6-59m9r\" (UID: \"e324349c-14cd-4043-9278-783e8faa883e\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.209120 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff4l4\" (UniqueName: \"kubernetes.io/projected/d1af403d-ad9d-464b-b9f8-60a57868b8fb-kube-api-access-ff4l4\") pod \"glance-operator-controller-manager-668d9c48b9-kwm52\" (UID: \"d1af403d-ad9d-464b-b9f8-60a57868b8fb\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.213262 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.219125 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qwzx\" (UniqueName: \"kubernetes.io/projected/15d00f84-d2c3-445d-b411-3f0bca56234e-kube-api-access-9qwzx\") pod \"keystone-operator-controller-manager-546d4bdf48-g2xq2\" (UID: \"15d00f84-d2c3-445d-b411-3f0bca56234e\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.219177 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbb8g\" (UniqueName: \"kubernetes.io/projected/5ac344b8-6b4f-45a7-afbd-666188065ed6-kube-api-access-kbb8g\") pod \"horizon-operator-controller-manager-68c6d99b8f-crskg\" (UID: \"5ac344b8-6b4f-45a7-afbd-666188065ed6\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.219197 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blddb\" (UniqueName: \"kubernetes.io/projected/7f3cfd2e-bec9-46f6-9161-92a9b33d38ac-kube-api-access-blddb\") pod \"mariadb-operator-controller-manager-56bbcc9d85-kzvdx\" (UID: \"7f3cfd2e-bec9-46f6-9161-92a9b33d38ac\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx"
Nov 30 07:02:30 crc kubenswrapper[4941]: E1130 07:02:30.219378 4941 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.219228 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert\") pod \"infra-operator-controller-manager-57548d458d-5vh4l\" (UID: \"3e37a585-c770-4472-bf53-4be22b98550a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l"
Nov 30 07:02:30 crc kubenswrapper[4941]: E1130 07:02:30.219802 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert podName:3e37a585-c770-4472-bf53-4be22b98550a nodeName:}" failed. No retries permitted until 2025-11-30 07:02:30.719409011 +0000 UTC m=+971.487580620 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert") pod "infra-operator-controller-manager-57548d458d-5vh4l" (UID: "3e37a585-c770-4472-bf53-4be22b98550a") : secret "infra-operator-webhook-server-cert" not found
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.219816 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq5qx\" (UniqueName: \"kubernetes.io/projected/cc51d757-c6d0-4fb1-9b26-4cb90ceacc60-kube-api-access-tq5qx\") pod \"manila-operator-controller-manager-6546668bfd-zscsf\" (UID: \"cc51d757-c6d0-4fb1-9b26-4cb90ceacc60\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.219852 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpznp\" (UniqueName: \"kubernetes.io/projected/3e37a585-c770-4472-bf53-4be22b98550a-kube-api-access-zpznp\") pod \"infra-operator-controller-manager-57548d458d-5vh4l\" (UID: \"3e37a585-c770-4472-bf53-4be22b98550a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.219891 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgr5m\" (UniqueName: \"kubernetes.io/projected/af1b065d-e876-4fd1-b63e-6c5015b7c169-kube-api-access-hgr5m\") pod \"ironic-operator-controller-manager-6c548fd776-fg9bt\" (UID: \"af1b065d-e876-4fd1-b63e-6c5015b7c169\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.237784 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmmsp\" (UniqueName: \"kubernetes.io/projected/9389b152-75d5-41ce-8638-f531ad93710d-kube-api-access-gmmsp\") pod \"designate-operator-controller-manager-78b4bc895b-vk24n\" (UID: \"9389b152-75d5-41ce-8638-f531ad93710d\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.238366 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.246900 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbb8g\" (UniqueName: \"kubernetes.io/projected/5ac344b8-6b4f-45a7-afbd-666188065ed6-kube-api-access-kbb8g\") pod \"horizon-operator-controller-manager-68c6d99b8f-crskg\" (UID: \"5ac344b8-6b4f-45a7-afbd-666188065ed6\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.256754 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.257180 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.257706 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgr5m\" (UniqueName: \"kubernetes.io/projected/af1b065d-e876-4fd1-b63e-6c5015b7c169-kube-api-access-hgr5m\") pod \"ironic-operator-controller-manager-6c548fd776-fg9bt\" (UID: \"af1b065d-e876-4fd1-b63e-6c5015b7c169\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.258697 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.260740 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-vhn26"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.266613 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpznp\" (UniqueName: \"kubernetes.io/projected/3e37a585-c770-4472-bf53-4be22b98550a-kube-api-access-zpznp\") pod \"infra-operator-controller-manager-57548d458d-5vh4l\" (UID: \"3e37a585-c770-4472-bf53-4be22b98550a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.271189 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.294537 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.299463 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.300478 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.303437 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.309874 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.313133 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.319137 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.322128 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.323817 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-ppp85"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.323937 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-f5mc7"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.325008 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blddb\" (UniqueName: \"kubernetes.io/projected/7f3cfd2e-bec9-46f6-9161-92a9b33d38ac-kube-api-access-blddb\") pod \"mariadb-operator-controller-manager-56bbcc9d85-kzvdx\" (UID: \"7f3cfd2e-bec9-46f6-9161-92a9b33d38ac\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.325105 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq5qx\" (UniqueName: \"kubernetes.io/projected/cc51d757-c6d0-4fb1-9b26-4cb90ceacc60-kube-api-access-tq5qx\") pod \"manila-operator-controller-manager-6546668bfd-zscsf\" (UID: \"cc51d757-c6d0-4fb1-9b26-4cb90ceacc60\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.325234 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qwzx\" (UniqueName: \"kubernetes.io/projected/15d00f84-d2c3-445d-b411-3f0bca56234e-kube-api-access-9qwzx\") pod \"keystone-operator-controller-manager-546d4bdf48-g2xq2\" (UID: \"15d00f84-d2c3-445d-b411-3f0bca56234e\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.337194 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.338912 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.366814 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-fzg2d"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.367834 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.377502 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.395234 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.396301 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.405950 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-txfdl"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.426657 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqw57\" (UniqueName: \"kubernetes.io/projected/2c71dfd3-27b4-4ec1-9983-2a8351bf8d59-kube-api-access-tqw57\") pod \"octavia-operator-controller-manager-998648c74-gl2n2\" (UID: \"2c71dfd3-27b4-4ec1-9983-2a8351bf8d59\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.426726 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mc94\" (UniqueName: \"kubernetes.io/projected/83803d31-cd83-4860-8129-b3b1d717aadd-kube-api-access-2mc94\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-2ccs4\" (UID: \"83803d31-cd83-4860-8129-b3b1d717aadd\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.426767 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smsl8\" (UniqueName: \"kubernetes.io/projected/4f23afd3-e930-4f37-b76f-cb5a6e158796-kube-api-access-smsl8\") pod \"nova-operator-controller-manager-697bc559fc-wt4hg\" (UID: \"4f23afd3-e930-4f37-b76f-cb5a6e158796\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.426871 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgdsj\" (UniqueName: \"kubernetes.io/projected/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-kube-api-access-mgdsj\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446nm7h7\" (UID: \"8be70097-8f2d-4a40-8d1f-57eadb38d1f2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.426892 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446nm7h7\" (UID: \"8be70097-8f2d-4a40-8d1f-57eadb38d1f2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.446992 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blddb\" (UniqueName: \"kubernetes.io/projected/7f3cfd2e-bec9-46f6-9161-92a9b33d38ac-kube-api-access-blddb\") pod \"mariadb-operator-controller-manager-56bbcc9d85-kzvdx\" (UID: \"7f3cfd2e-bec9-46f6-9161-92a9b33d38ac\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.459970 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq5qx\" (UniqueName: \"kubernetes.io/projected/cc51d757-c6d0-4fb1-9b26-4cb90ceacc60-kube-api-access-tq5qx\") pod \"manila-operator-controller-manager-6546668bfd-zscsf\" (UID: \"cc51d757-c6d0-4fb1-9b26-4cb90ceacc60\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.460135 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-49bzg"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.461144 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-49bzg"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.468694 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-pzcv6"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.495878 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qwzx\" (UniqueName: \"kubernetes.io/projected/15d00f84-d2c3-445d-b411-3f0bca56234e-kube-api-access-9qwzx\") pod \"keystone-operator-controller-manager-546d4bdf48-g2xq2\" (UID: \"15d00f84-d2c3-445d-b411-3f0bca56234e\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.501681 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.530421 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.531622 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgdsj\" (UniqueName: \"kubernetes.io/projected/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-kube-api-access-mgdsj\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446nm7h7\" (UID: \"8be70097-8f2d-4a40-8d1f-57eadb38d1f2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.531656 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446nm7h7\" (UID: \"8be70097-8f2d-4a40-8d1f-57eadb38d1f2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.531683 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqw57\" (UniqueName: \"kubernetes.io/projected/2c71dfd3-27b4-4ec1-9983-2a8351bf8d59-kube-api-access-tqw57\") pod \"octavia-operator-controller-manager-998648c74-gl2n2\" (UID: \"2c71dfd3-27b4-4ec1-9983-2a8351bf8d59\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.531715 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr2k4\" (UniqueName: \"kubernetes.io/projected/17c5c370-aa15-4666-92dc-3ba34847a487-kube-api-access-zr2k4\") pod \"ovn-operator-controller-manager-b6456fdb6-l6kp5\" (UID: \"17c5c370-aa15-4666-92dc-3ba34847a487\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.531760 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mc94\" (UniqueName: \"kubernetes.io/projected/83803d31-cd83-4860-8129-b3b1d717aadd-kube-api-access-2mc94\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-2ccs4\" (UID: \"83803d31-cd83-4860-8129-b3b1d717aadd\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.531783 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4qlh\" (UniqueName: \"kubernetes.io/projected/6af3ebbf-c01d-45a6-b88d-f1fa2aa8044e-kube-api-access-l4qlh\") pod \"placement-operator-controller-manager-78f8948974-49bzg\" (UID: \"6af3ebbf-c01d-45a6-b88d-f1fa2aa8044e\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-49bzg"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.531817 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smsl8\" (UniqueName: \"kubernetes.io/projected/4f23afd3-e930-4f37-b76f-cb5a6e158796-kube-api-access-smsl8\") pod \"nova-operator-controller-manager-697bc559fc-wt4hg\" (UID: \"4f23afd3-e930-4f37-b76f-cb5a6e158796\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg"
Nov 30 07:02:30 crc kubenswrapper[4941]: E1130 07:02:30.532254 4941 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Nov 30 07:02:30 crc kubenswrapper[4941]: E1130 07:02:30.532291 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert podName:8be70097-8f2d-4a40-8d1f-57eadb38d1f2 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:31.032278766 +0000 UTC m=+971.800450375 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" (UID: "8be70097-8f2d-4a40-8d1f-57eadb38d1f2") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.599947 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mc94\" (UniqueName: \"kubernetes.io/projected/83803d31-cd83-4860-8129-b3b1d717aadd-kube-api-access-2mc94\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-2ccs4\" (UID: \"83803d31-cd83-4860-8129-b3b1d717aadd\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.603346 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqw57\" (UniqueName: \"kubernetes.io/projected/2c71dfd3-27b4-4ec1-9983-2a8351bf8d59-kube-api-access-tqw57\") pod \"octavia-operator-controller-manager-998648c74-gl2n2\" (UID: \"2c71dfd3-27b4-4ec1-9983-2a8351bf8d59\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.603712 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.604107 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smsl8\" (UniqueName: \"kubernetes.io/projected/4f23afd3-e930-4f37-b76f-cb5a6e158796-kube-api-access-smsl8\") pod \"nova-operator-controller-manager-697bc559fc-wt4hg\" (UID: \"4f23afd3-e930-4f37-b76f-cb5a6e158796\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.606228 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.617709 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgdsj\" (UniqueName: \"kubernetes.io/projected/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-kube-api-access-mgdsj\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446nm7h7\" (UID: \"8be70097-8f2d-4a40-8d1f-57eadb38d1f2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.647657 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr2k4\" (UniqueName: \"kubernetes.io/projected/17c5c370-aa15-4666-92dc-3ba34847a487-kube-api-access-zr2k4\") pod \"ovn-operator-controller-manager-b6456fdb6-l6kp5\" (UID: \"17c5c370-aa15-4666-92dc-3ba34847a487\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.647724 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4qlh\" (UniqueName: \"kubernetes.io/projected/6af3ebbf-c01d-45a6-b88d-f1fa2aa8044e-kube-api-access-l4qlh\") pod \"placement-operator-controller-manager-78f8948974-49bzg\" (UID: \"6af3ebbf-c01d-45a6-b88d-f1fa2aa8044e\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-49bzg"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.649128 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.664098 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.665707 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.670063 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-49bzg"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.680085 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-nnqvt"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.688106 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4qlh\" (UniqueName: \"kubernetes.io/projected/6af3ebbf-c01d-45a6-b88d-f1fa2aa8044e-kube-api-access-l4qlh\") pod \"placement-operator-controller-manager-78f8948974-49bzg\" (UID: \"6af3ebbf-c01d-45a6-b88d-f1fa2aa8044e\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-49bzg"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.695312 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.697429 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr2k4\" (UniqueName: \"kubernetes.io/projected/17c5c370-aa15-4666-92dc-3ba34847a487-kube-api-access-zr2k4\") pod \"ovn-operator-controller-manager-b6456fdb6-l6kp5\" (UID: \"17c5c370-aa15-4666-92dc-3ba34847a487\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.711302 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.712009 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.714879 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.716087 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.719695 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-xc8nn"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.732258 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.748399 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.749107 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jjxq\" (UniqueName: \"kubernetes.io/projected/dedc405b-7c3e-4df1-afe4-63658d5a92ef-kube-api-access-7jjxq\") pod \"swift-operator-controller-manager-5f8c65bbfc-85qhp\" (UID: \"dedc405b-7c3e-4df1-afe4-63658d5a92ef\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.749190 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert\") pod \"infra-operator-controller-manager-57548d458d-5vh4l\" (UID: \"3e37a585-c770-4472-bf53-4be22b98550a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l"
Nov 30 07:02:30 crc kubenswrapper[4941]: E1130 07:02:30.749342 4941 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Nov 30 07:02:30 crc kubenswrapper[4941]: E1130 07:02:30.749386 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert podName:3e37a585-c770-4472-bf53-4be22b98550a nodeName:}" failed. No retries permitted until 2025-11-30 07:02:31.749373064 +0000 UTC m=+972.517544673 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert") pod "infra-operator-controller-manager-57548d458d-5vh4l" (UID: "3e37a585-c770-4472-bf53-4be22b98550a") : secret "infra-operator-webhook-server-cert" not found
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.760786 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.782435 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-d628w"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.783626 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.786334 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-g62kz"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.790010 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-d628w"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.845967 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.848171 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.856799 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-49bzg"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.859770 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.866117 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-2nmln"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.867305 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.869292 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrv5j\" (UniqueName: \"kubernetes.io/projected/4cd61cf4-a463-445a-83f1-2598f698d53a-kube-api-access-zrv5j\") pod \"telemetry-operator-controller-manager-76cc84c6bb-kx5z9\" (UID: \"4cd61cf4-a463-445a-83f1-2598f698d53a\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.869356 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jjxq\" (UniqueName: \"kubernetes.io/projected/dedc405b-7c3e-4df1-afe4-63658d5a92ef-kube-api-access-7jjxq\") pod \"swift-operator-controller-manager-5f8c65bbfc-85qhp\" (UID: \"dedc405b-7c3e-4df1-afe4-63658d5a92ef\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp"
Nov 30 07:02:30 crc kubenswrapper[4941]: W1130 07:02:30.891094 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1af403d_ad9d_464b_b9f8_60a57868b8fb.slice/crio-d3dd906c010054619c15a1e61537cb85c57c541c88aa42b9312ac39387aa112d WatchSource:0}: Error finding container d3dd906c010054619c15a1e61537cb85c57c541c88aa42b9312ac39387aa112d: Status 404 returned error can't find the container with id d3dd906c010054619c15a1e61537cb85c57c541c88aa42b9312ac39387aa112d
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.904819 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jjxq\" (UniqueName: \"kubernetes.io/projected/dedc405b-7c3e-4df1-afe4-63658d5a92ef-kube-api-access-7jjxq\") pod \"swift-operator-controller-manager-5f8c65bbfc-85qhp\" (UID: \"dedc405b-7c3e-4df1-afe4-63658d5a92ef\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.945665 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.947081 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.949459 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.949849 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-vzdv6"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.949965 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.955722 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.966155 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.967118 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.968769 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-46chv"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.970364 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vvmt\" (UniqueName: \"kubernetes.io/projected/3c1ab696-9519-4a69-82c6-4a7078a7472a-kube-api-access-8vvmt\") pod \"watcher-operator-controller-manager-769dc69bc-898p4\" (UID: \"3c1ab696-9519-4a69-82c6-4a7078a7472a\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.970405 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mj5x\" (UniqueName: \"kubernetes.io/projected/1111c4ca-9dea-44f1-b391-e534c8c31476-kube-api-access-9mj5x\") pod \"test-operator-controller-manager-5854674fcc-d628w\" (UID: \"1111c4ca-9dea-44f1-b391-e534c8c31476\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.970424 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrv5j\" (UniqueName: \"kubernetes.io/projected/4cd61cf4-a463-445a-83f1-2598f698d53a-kube-api-access-zrv5j\") pod \"telemetry-operator-controller-manager-76cc84c6bb-kx5z9\" (UID: \"4cd61cf4-a463-445a-83f1-2598f698d53a\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9"
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.971755 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.982414 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52"]
Nov 30 07:02:30 crc kubenswrapper[4941]: I1130 07:02:30.991448 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n"]
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.004092 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrv5j\" (UniqueName: \"kubernetes.io/projected/4cd61cf4-a463-445a-83f1-2598f698d53a-kube-api-access-zrv5j\") pod \"telemetry-operator-controller-manager-76cc84c6bb-kx5z9\" (UID: \"4cd61cf4-a463-445a-83f1-2598f698d53a\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.009905 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.071313 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.071373 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vvmt\" (UniqueName: \"kubernetes.io/projected/3c1ab696-9519-4a69-82c6-4a7078a7472a-kube-api-access-8vvmt\") pod \"watcher-operator-controller-manager-769dc69bc-898p4\" (UID: \"3c1ab696-9519-4a69-82c6-4a7078a7472a\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.071398 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mj5x\" (UniqueName: \"kubernetes.io/projected/1111c4ca-9dea-44f1-b391-e534c8c31476-kube-api-access-9mj5x\") pod \"test-operator-controller-manager-5854674fcc-d628w\" (UID: \"1111c4ca-9dea-44f1-b391-e534c8c31476\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.071442 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.071469 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446nm7h7\" (UID: \"8be70097-8f2d-4a40-8d1f-57eadb38d1f2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.071492 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbl7q\" (UniqueName: \"kubernetes.io/projected/f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec-kube-api-access-mbl7q\") pod \"rabbitmq-cluster-operator-manager-668c99d594-7mmgl\" (UID: \"f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.071536 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6b8g\" (UniqueName: \"kubernetes.io/projected/43133738-5033-4356-a2d6-7f0a9b78c7f8-kube-api-access-c6b8g\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.072037 4941 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.072074 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert podName:8be70097-8f2d-4a40-8d1f-57eadb38d1f2 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:32.072061214 +0000 UTC m=+972.840232823 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" (UID: "8be70097-8f2d-4a40-8d1f-57eadb38d1f2") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.083135 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.096686 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mj5x\" (UniqueName: \"kubernetes.io/projected/1111c4ca-9dea-44f1-b391-e534c8c31476-kube-api-access-9mj5x\") pod \"test-operator-controller-manager-5854674fcc-d628w\" (UID: \"1111c4ca-9dea-44f1-b391-e534c8c31476\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.097841 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vvmt\" (UniqueName: \"kubernetes.io/projected/3c1ab696-9519-4a69-82c6-4a7078a7472a-kube-api-access-8vvmt\") pod \"watcher-operator-controller-manager-769dc69bc-898p4\" (UID: \"3c1ab696-9519-4a69-82c6-4a7078a7472a\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.182971 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.183029 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbl7q\" (UniqueName: \"kubernetes.io/projected/f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec-kube-api-access-mbl7q\") pod \"rabbitmq-cluster-operator-manager-668c99d594-7mmgl\" (UID: \"f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.183069 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6b8g\" (UniqueName: \"kubernetes.io/projected/43133738-5033-4356-a2d6-7f0a9b78c7f8-kube-api-access-c6b8g\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.183116 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.183219 4941 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.183271 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs podName:43133738-5033-4356-a2d6-7f0a9b78c7f8 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:31.683255683 +0000 UTC m=+972.451427292 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-nwtqx" (UID: "43133738-5033-4356-a2d6-7f0a9b78c7f8") : secret "metrics-server-cert" not found
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.183485 4941 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.183513 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs podName:43133738-5033-4356-a2d6-7f0a9b78c7f8 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:31.683506571 +0000 UTC m=+972.451678180 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-nwtqx" (UID: "43133738-5033-4356-a2d6-7f0a9b78c7f8") : secret "webhook-server-cert" not found
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.185121 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.191942 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.194958 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw"]
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.209259 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6b8g\" (UniqueName: \"kubernetes.io/projected/43133738-5033-4356-a2d6-7f0a9b78c7f8-kube-api-access-c6b8g\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.209715 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbl7q\" (UniqueName: \"kubernetes.io/projected/f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec-kube-api-access-mbl7q\") pod \"rabbitmq-cluster-operator-manager-668c99d594-7mmgl\" (UID: \"f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.234829 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52" event={"ID":"d1af403d-ad9d-464b-b9f8-60a57868b8fb","Type":"ContainerStarted","Data":"d3dd906c010054619c15a1e61537cb85c57c541c88aa42b9312ac39387aa112d"}
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.256447 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn"]
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.259526 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n" event={"ID":"9389b152-75d5-41ce-8638-f531ad93710d","Type":"ContainerStarted","Data":"d1b8facf08f2b8324247f876dd7c10da7e88d8c753ac207087b797745913b6a3"}
Nov 30 07:02:31 crc kubenswrapper[4941]: W1130 07:02:31.268984 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5e74e68_5dfa_41e0_9301_09f3e59450fe.slice/crio-2d783e925d03f74c498335bd21a6cfdb2d7c36fdcaf163c209e8118d931b067d WatchSource:0}: Error finding container 2d783e925d03f74c498335bd21a6cfdb2d7c36fdcaf163c209e8118d931b067d: Status 404 returned error can't find the container with id 2d783e925d03f74c498335bd21a6cfdb2d7c36fdcaf163c209e8118d931b067d
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.338103 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.585147 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r"]
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.592560 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt"]
Nov 30 07:02:31 crc kubenswrapper[4941]: W1130 07:02:31.601557 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf1b065d_e876_4fd1_b63e_6c5015b7c169.slice/crio-aaec114ef9357a8cb9fd491999fcc1d9aaf789ca294d73f9a1d6b6ac19e6d9db WatchSource:0}: Error finding container aaec114ef9357a8cb9fd491999fcc1d9aaf789ca294d73f9a1d6b6ac19e6d9db: Status 404 returned error can't find the container with id aaec114ef9357a8cb9fd491999fcc1d9aaf789ca294d73f9a1d6b6ac19e6d9db
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.658829 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2"]
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.677106 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg"]
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.687962 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx"]
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.689739 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.689803 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.689926 4941 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.689994 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs podName:43133738-5033-4356-a2d6-7f0a9b78c7f8 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:32.689975504 +0000 UTC m=+973.458147113 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-nwtqx" (UID: "43133738-5033-4356-a2d6-7f0a9b78c7f8") : secret "metrics-server-cert" not found
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.690040 4941 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.690079 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs podName:43133738-5033-4356-a2d6-7f0a9b78c7f8 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:32.690065467 +0000 UTC m=+973.458237076 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-nwtqx" (UID: "43133738-5033-4356-a2d6-7f0a9b78c7f8") : secret "webhook-server-cert" not found
Nov 30 07:02:31 crc kubenswrapper[4941]: W1130 07:02:31.701824 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83803d31_cd83_4860_8129_b3b1d717aadd.slice/crio-61d8b0c6038634ed6bc83371014c92aa2d55a9327f661d1ec0bc725578dce3e6 WatchSource:0}: Error finding container 61d8b0c6038634ed6bc83371014c92aa2d55a9327f661d1ec0bc725578dce3e6: Status 404 returned error can't find the container with id 61d8b0c6038634ed6bc83371014c92aa2d55a9327f661d1ec0bc725578dce3e6
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.703684 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4"]
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.720001 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf"]
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.790704 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert\") pod \"infra-operator-controller-manager-57548d458d-5vh4l\" (UID: \"3e37a585-c770-4472-bf53-4be22b98550a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l"
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.790898 4941 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.790981 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert podName:3e37a585-c770-4472-bf53-4be22b98550a nodeName:}" failed. No retries permitted until 2025-11-30 07:02:33.790961965 +0000 UTC m=+974.559133564 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert") pod "infra-operator-controller-manager-57548d458d-5vh4l" (UID: "3e37a585-c770-4472-bf53-4be22b98550a") : secret "infra-operator-webhook-server-cert" not found
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.852092 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-49bzg"]
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.871190 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg"]
Nov 30 07:02:31 crc kubenswrapper[4941]: W1130 07:02:31.872136 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17c5c370_aa15_4666_92dc_3ba34847a487.slice/crio-4a6a5f1a01412af2a329d83932d4ddc6254812609dd702f0ebde37d8899b2f94 WatchSource:0}: Error finding container 4a6a5f1a01412af2a329d83932d4ddc6254812609dd702f0ebde37d8899b2f94: Status 404 returned error can't find the container with id 4a6a5f1a01412af2a329d83932d4ddc6254812609dd702f0ebde37d8899b2f94
Nov 30 07:02:31 crc kubenswrapper[4941]: W1130 07:02:31.876535 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15d00f84_d2c3_445d_b411_3f0bca56234e.slice/crio-e1eb8aee3e543a4020b7281ee6830b97ccc0a4857eb0c4ff32e3cd72a2cf2849 WatchSource:0}: Error finding container e1eb8aee3e543a4020b7281ee6830b97ccc0a4857eb0c4ff32e3cd72a2cf2849: Status 404 returned error can't find the container with id e1eb8aee3e543a4020b7281ee6830b97ccc0a4857eb0c4ff32e3cd72a2cf2849
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.880818 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5"]
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.907120 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-smsl8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-wt4hg_openstack-operators(4f23afd3-e930-4f37-b76f-cb5a6e158796): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.907848 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2"]
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.909595 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-smsl8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-wt4hg_openstack-operators(4f23afd3-e930-4f37-b76f-cb5a6e158796): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.911071 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg" podUID="4f23afd3-e930-4f37-b76f-cb5a6e158796"
Nov 30 07:02:31 crc kubenswrapper[4941]: W1130 07:02:31.960070 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4cd61cf4_a463_445a_83f1_2598f698d53a.slice/crio-15375672c331d158467598589909156dea6478458a0f127b16c7c262e664fbd4 WatchSource:0}: Error finding container 15375672c331d158467598589909156dea6478458a0f127b16c7c262e664fbd4: Status 404 returned error can't find the container with id 15375672c331d158467598589909156dea6478458a0f127b16c7c262e664fbd4
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.963195 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zrv5j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-kx5z9_openstack-operators(4cd61cf4-a463-445a-83f1-2598f698d53a): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.963446 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9"]
Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.965375 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zrv5j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-kx5z9_openstack-operators(4cd61cf4-a463-445a-83f1-2598f698d53a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.966518 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9" podUID="4cd61cf4-a463-445a-83f1-2598f698d53a" Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.972271 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp"] Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.977041 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8vvmt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-898p4_openstack-operators(3c1ab696-9519-4a69-82c6-4a7078a7472a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.978992 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8vvmt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-898p4_openstack-operators(3c1ab696-9519-4a69-82c6-4a7078a7472a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.980484 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4" podUID="3c1ab696-9519-4a69-82c6-4a7078a7472a" Nov 30 07:02:31 crc kubenswrapper[4941]: W1130 07:02:31.982209 4941 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1111c4ca_9dea_44f1_b391_e534c8c31476.slice/crio-c543e4391fb5ddcba15c406439a77fcdc9c0b3804dc609675b5ffb212ab4172d WatchSource:0}: Error finding container c543e4391fb5ddcba15c406439a77fcdc9c0b3804dc609675b5ffb212ab4172d: Status 404 returned error can't find the container with id c543e4391fb5ddcba15c406439a77fcdc9c0b3804dc609675b5ffb212ab4172d Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.982244 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4"] Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.986094 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9mj5x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-d628w_openstack-operators(1111c4ca-9dea-44f1-b391-e534c8c31476): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 30 07:02:31 crc kubenswrapper[4941]: I1130 07:02:31.988188 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-d628w"] Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.988904 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9mj5x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-d628w_openstack-operators(1111c4ca-9dea-44f1-b391-e534c8c31476): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 30 07:02:31 crc kubenswrapper[4941]: E1130 07:02:31.990220 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w" podUID="1111c4ca-9dea-44f1-b391-e534c8c31476" Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.050883 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl"] Nov 30 07:02:32 crc kubenswrapper[4941]: W1130 07:02:32.054301 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf27d34c3_fcc0_4180_8bc2_8d95bef6d2ec.slice/crio-f637794c739d4ef5d62384093e9888c6839ea75da4050fa94b5aedd39351b6bc WatchSource:0}: Error finding container f637794c739d4ef5d62384093e9888c6839ea75da4050fa94b5aedd39351b6bc: Status 404 returned error can't find the container with id f637794c739d4ef5d62384093e9888c6839ea75da4050fa94b5aedd39351b6bc Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.057047 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mbl7q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-7mmgl_openstack-operators(f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.058287 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl" podUID="f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec" Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.102182 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446nm7h7\" (UID: \"8be70097-8f2d-4a40-8d1f-57eadb38d1f2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.102375 4941 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.102443 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert podName:8be70097-8f2d-4a40-8d1f-57eadb38d1f2 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:34.102425952 +0000 UTC m=+974.870597561 (durationBeforeRetry 2s). 
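Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" (UID: "8be70097-8f2d-4a40-8d1f-57eadb38d1f2") : secret "openstack-baremetal-operator-webhook-server-cert" not found

Two independent things are stalling these pods: the missing cert secrets, and image pulls failing with "pull QPS exceeded". The latter happens because all of the operator deployments landed on the node at once: the kubelet throttles pulls with a token bucket sized by registryPullQPS and registryBurst in the KubeletConfiguration (5 and 10 by default), and a pull that finds the bucket empty is reported as ErrImagePull rather than queued. A sketch of that primitive with client-go's flowcontrol package:

    package main

    import (
        "fmt"

        "k8s.io/client-go/util/flowcontrol"
    )

    func main() {
        // Token bucket with the default kubelet sizing: 5 tokens/s, burst of 10.
        limiter := flowcontrol.NewTokenBucketRateLimiter(5, 10)

        // ~30 near-simultaneous pull attempts, as when every operator pod
        // starts at once: the first burst is admitted, the rest are rejected.
        for i := 1; i <= 30; i++ {
            fmt.Printf("pull %2d admitted: %v\n", i, limiter.TryAccept())
        }
    }

Raising registryPullQPS in the kubelet config (or setting it to 0, which disables the limit) avoids this thundering-herd failure on single-node dev clusters like CRC.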
Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.269655 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt" event={"ID":"af1b065d-e876-4fd1-b63e-6c5015b7c169","Type":"ContainerStarted","Data":"aaec114ef9357a8cb9fd491999fcc1d9aaf789ca294d73f9a1d6b6ac19e6d9db"}
Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.271156 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-49bzg" event={"ID":"6af3ebbf-c01d-45a6-b88d-f1fa2aa8044e","Type":"ContainerStarted","Data":"4b8a34ca8a70ab123b999dccd911a3577472d08dc0a5f289c0613b8ddb0a82bc"}
Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.272200 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf" event={"ID":"cc51d757-c6d0-4fb1-9b26-4cb90ceacc60","Type":"ContainerStarted","Data":"6da2bef5df59f32ac83cb98b682fce0d9059c9ebb920a392b40fd79f3aa9da9c"}
Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.273815 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5" event={"ID":"17c5c370-aa15-4666-92dc-3ba34847a487","Type":"ContainerStarted","Data":"4a6a5f1a01412af2a329d83932d4ddc6254812609dd702f0ebde37d8899b2f94"}
Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.274932 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw" event={"ID":"a5e74e68-5dfa-41e0-9301-09f3e59450fe","Type":"ContainerStarted","Data":"2d783e925d03f74c498335bd21a6cfdb2d7c36fdcaf163c209e8118d931b067d"}
Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.276859 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg" event={"ID":"5ac344b8-6b4f-45a7-afbd-666188065ed6","Type":"ContainerStarted","Data":"30671185db2dc451d05055a83a2c832f01b4551d4c45410406f858fd62573ab4"}
Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.277773 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9" event={"ID":"4cd61cf4-a463-445a-83f1-2598f698d53a","Type":"ContainerStarted","Data":"15375672c331d158467598589909156dea6478458a0f127b16c7c262e664fbd4"}
Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.279022 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r" event={"ID":"e324349c-14cd-4043-9278-783e8faa883e","Type":"ContainerStarted","Data":"321c52b0c5f6895ae3df208a1a4bd1e0ed0c7abcb8bccb8c8c04d44bdaf72890"}
Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.280034 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image
\\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9" podUID="4cd61cf4-a463-445a-83f1-2598f698d53a" Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.280754 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx" event={"ID":"7f3cfd2e-bec9-46f6-9161-92a9b33d38ac","Type":"ContainerStarted","Data":"11eba5c3e46f2be9eaec8d1e700e4e2049586ed6f263d49ec7a2d191bfa550bc"} Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.282157 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4" event={"ID":"3c1ab696-9519-4a69-82c6-4a7078a7472a","Type":"ContainerStarted","Data":"9e2b1710b49a2b32b210ffccd82921294cfda414c2bd7189140df7ac886e0b39"} Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.287985 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2" event={"ID":"15d00f84-d2c3-445d-b411-3f0bca56234e","Type":"ContainerStarted","Data":"e1eb8aee3e543a4020b7281ee6830b97ccc0a4857eb0c4ff32e3cd72a2cf2849"} Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.288922 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp" event={"ID":"dedc405b-7c3e-4df1-afe4-63658d5a92ef","Type":"ContainerStarted","Data":"dc26f7276530f6e8a7295956b028936c21f007538ca5958b46a928c5adc86f6c"} Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.289471 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4" podUID="3c1ab696-9519-4a69-82c6-4a7078a7472a" Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.290252 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2" event={"ID":"2c71dfd3-27b4-4ec1-9983-2a8351bf8d59","Type":"ContainerStarted","Data":"c751b88febde546acb9e96910132ffa9e32cd3b0c602c82502b43db92f378808"} Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.300015 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w" event={"ID":"1111c4ca-9dea-44f1-b391-e534c8c31476","Type":"ContainerStarted","Data":"c543e4391fb5ddcba15c406439a77fcdc9c0b3804dc609675b5ffb212ab4172d"} Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.302390 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w" 
podUID="1111c4ca-9dea-44f1-b391-e534c8c31476" Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.302480 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl" event={"ID":"f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec","Type":"ContainerStarted","Data":"f637794c739d4ef5d62384093e9888c6839ea75da4050fa94b5aedd39351b6bc"} Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.303280 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl" podUID="f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec" Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.303927 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg" event={"ID":"4f23afd3-e930-4f37-b76f-cb5a6e158796","Type":"ContainerStarted","Data":"174484cfeefbac5e22714fd1ef562154ba330df893390bf1020f41bfdf5e08b6"} Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.305297 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg" podUID="4f23afd3-e930-4f37-b76f-cb5a6e158796" Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.305819 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn" event={"ID":"689afda6-cba6-4975-be84-09c20304ae05","Type":"ContainerStarted","Data":"622dc1860e6f1b9b04ba79999775cb015f7f0ce2f7e151fa1acb1949bc12d1e6"} Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.306789 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4" event={"ID":"83803d31-cd83-4860-8129-b3b1d717aadd","Type":"ContainerStarted","Data":"61d8b0c6038634ed6bc83371014c92aa2d55a9327f661d1ec0bc725578dce3e6"} Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.710175 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" Nov 30 07:02:32 crc kubenswrapper[4941]: I1130 07:02:32.710304 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.710405 4941 secret.go:188] Couldn't get secret 
openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.710458 4941 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.710498 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs podName:43133738-5033-4356-a2d6-7f0a9b78c7f8 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:34.710480435 +0000 UTC m=+975.478652044 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-nwtqx" (UID: "43133738-5033-4356-a2d6-7f0a9b78c7f8") : secret "webhook-server-cert" not found Nov 30 07:02:32 crc kubenswrapper[4941]: E1130 07:02:32.710580 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs podName:43133738-5033-4356-a2d6-7f0a9b78c7f8 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:34.710509846 +0000 UTC m=+975.478681455 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-nwtqx" (UID: "43133738-5033-4356-a2d6-7f0a9b78c7f8") : secret "metrics-server-cert" not found Nov 30 07:02:33 crc kubenswrapper[4941]: E1130 07:02:33.327313 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl" podUID="f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec" Nov 30 07:02:33 crc kubenswrapper[4941]: E1130 07:02:33.328940 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w" podUID="1111c4ca-9dea-44f1-b391-e534c8c31476" Nov 30 07:02:33 crc kubenswrapper[4941]: E1130 07:02:33.329258 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4" podUID="3c1ab696-9519-4a69-82c6-4a7078a7472a" Nov 30 07:02:33 crc kubenswrapper[4941]: E1130 07:02:33.329350 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with 
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9" podUID="4cd61cf4-a463-445a-83f1-2598f698d53a" Nov 30 07:02:33 crc kubenswrapper[4941]: E1130 07:02:33.329698 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg" podUID="4f23afd3-e930-4f37-b76f-cb5a6e158796" Nov 30 07:02:33 crc kubenswrapper[4941]: I1130 07:02:33.829052 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert\") pod \"infra-operator-controller-manager-57548d458d-5vh4l\" (UID: \"3e37a585-c770-4472-bf53-4be22b98550a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" Nov 30 07:02:33 crc kubenswrapper[4941]: E1130 07:02:33.829206 4941 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 30 07:02:33 crc kubenswrapper[4941]: E1130 07:02:33.829261 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert podName:3e37a585-c770-4472-bf53-4be22b98550a nodeName:}" failed. No retries permitted until 2025-11-30 07:02:37.829247814 +0000 UTC m=+978.597419423 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert") pod "infra-operator-controller-manager-57548d458d-5vh4l" (UID: "3e37a585-c770-4472-bf53-4be22b98550a") : secret "infra-operator-webhook-server-cert" not found Nov 30 07:02:34 crc kubenswrapper[4941]: I1130 07:02:34.132928 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446nm7h7\" (UID: \"8be70097-8f2d-4a40-8d1f-57eadb38d1f2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" Nov 30 07:02:34 crc kubenswrapper[4941]: E1130 07:02:34.133111 4941 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 30 07:02:34 crc kubenswrapper[4941]: E1130 07:02:34.133187 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert podName:8be70097-8f2d-4a40-8d1f-57eadb38d1f2 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:38.133168496 +0000 UTC m=+978.901340105 (durationBeforeRetry 4s). 
Nov 30 07:02:34 crc kubenswrapper[4941]: I1130 07:02:34.743161 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:02:34 crc kubenswrapper[4941]: E1130 07:02:34.743414 4941 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Nov 30 07:02:34 crc kubenswrapper[4941]: E1130 07:02:34.743519 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs podName:43133738-5033-4356-a2d6-7f0a9b78c7f8 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:38.74350155 +0000 UTC m=+979.511673159 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-nwtqx" (UID: "43133738-5033-4356-a2d6-7f0a9b78c7f8") : secret "metrics-server-cert" not found
Nov 30 07:02:34 crc kubenswrapper[4941]: I1130 07:02:34.743549 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:02:34 crc kubenswrapper[4941]: E1130 07:02:34.743840 4941 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Nov 30 07:02:34 crc kubenswrapper[4941]: E1130 07:02:34.743927 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs podName:43133738-5033-4356-a2d6-7f0a9b78c7f8 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:38.743908663 +0000 UTC m=+979.512080272 (durationBeforeRetry 4s).
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-nwtqx" (UID: "43133738-5033-4356-a2d6-7f0a9b78c7f8") : secret "webhook-server-cert" not found Nov 30 07:02:37 crc kubenswrapper[4941]: I1130 07:02:37.892594 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert\") pod \"infra-operator-controller-manager-57548d458d-5vh4l\" (UID: \"3e37a585-c770-4472-bf53-4be22b98550a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" Nov 30 07:02:37 crc kubenswrapper[4941]: E1130 07:02:37.892755 4941 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 30 07:02:37 crc kubenswrapper[4941]: E1130 07:02:37.892938 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert podName:3e37a585-c770-4472-bf53-4be22b98550a nodeName:}" failed. No retries permitted until 2025-11-30 07:02:45.892920218 +0000 UTC m=+986.661091827 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert") pod "infra-operator-controller-manager-57548d458d-5vh4l" (UID: "3e37a585-c770-4472-bf53-4be22b98550a") : secret "infra-operator-webhook-server-cert" not found Nov 30 07:02:38 crc kubenswrapper[4941]: I1130 07:02:38.197722 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446nm7h7\" (UID: \"8be70097-8f2d-4a40-8d1f-57eadb38d1f2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" Nov 30 07:02:38 crc kubenswrapper[4941]: E1130 07:02:38.197949 4941 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 30 07:02:38 crc kubenswrapper[4941]: E1130 07:02:38.198041 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert podName:8be70097-8f2d-4a40-8d1f-57eadb38d1f2 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:46.198015488 +0000 UTC m=+986.966187107 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" (UID: "8be70097-8f2d-4a40-8d1f-57eadb38d1f2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 30 07:02:38 crc kubenswrapper[4941]: I1130 07:02:38.807939 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" Nov 30 07:02:38 crc kubenswrapper[4941]: I1130 07:02:38.808109 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" Nov 30 07:02:38 crc kubenswrapper[4941]: E1130 07:02:38.808162 4941 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 30 07:02:38 crc kubenswrapper[4941]: E1130 07:02:38.808241 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs podName:43133738-5033-4356-a2d6-7f0a9b78c7f8 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:46.808223448 +0000 UTC m=+987.576395057 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-nwtqx" (UID: "43133738-5033-4356-a2d6-7f0a9b78c7f8") : secret "webhook-server-cert" not found Nov 30 07:02:38 crc kubenswrapper[4941]: E1130 07:02:38.808245 4941 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 30 07:02:38 crc kubenswrapper[4941]: E1130 07:02:38.808311 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs podName:43133738-5033-4356-a2d6-7f0a9b78c7f8 nodeName:}" failed. No retries permitted until 2025-11-30 07:02:46.808277309 +0000 UTC m=+987.576449008 (durationBeforeRetry 8s). 
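Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-nwtqx" (UID: "43133738-5033-4356-a2d6-7f0a9b78c7f8") : secret "metrics-server-cert" not found

Roughly ten seconds after the first round of pull failures, the 07:02:42 entries below show the same kube-rbac-proxy pulls being retried, hitting the QPS limiter again, and sending the pods into ImagePullBackOff. The kubelet tracks that back-off per image, starting around 10s and doubling to a 5m cap (those are the commonly cited kubelet defaults, not values read from this cluster). A sketch of the bookkeeping with client-go's flowcontrol.Backoff:

    package main

    import (
        "fmt"
        "time"

        "k8s.io/client-go/util/flowcontrol"
    )

    func main() {
        // Assumed defaults: 10s initial back-off doubling to a 5m ceiling.
        backoff := flowcontrol.NewBackOff(10*time.Second, 5*time.Minute)
        image := "quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0"

        for i := 1; i <= 6; i++ {
            backoff.Next(image, backoff.Clock.Now()) // record another failed pull
            fmt.Printf("failure %d: next pull delayed %v\n", i, backoff.Get(image))
        }
    }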
Nov 30 07:02:42 crc kubenswrapper[4941]: E1130 07:02:42.691210 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-blddb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-kzvdx_openstack-operators(7f3cfd2e-bec9-46f6-9161-92a9b33d38ac): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Nov 30 07:02:42 crc kubenswrapper[4941]: E1130 07:02:42.692917 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx" podUID="7f3cfd2e-bec9-46f6-9161-92a9b33d38ac"
Nov 30 07:02:42 crc kubenswrapper[4941]: E1130 07:02:42.693085 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {}
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tqw57,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-gl2n2_openstack-operators(2c71dfd3-27b4-4ec1-9983-2a8351bf8d59): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 30 07:02:42 crc kubenswrapper[4941]: E1130 07:02:42.694368 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2" podUID="2c71dfd3-27b4-4ec1-9983-2a8351bf8d59" Nov 30 07:02:42 crc kubenswrapper[4941]: E1130 07:02:42.701369 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2mc94,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-2ccs4_openstack-operators(83803d31-cd83-4860-8129-b3b1d717aadd): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 30 07:02:42 crc kubenswrapper[4941]: E1130 07:02:42.703226 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4" podUID="83803d31-cd83-4860-8129-b3b1d717aadd" Nov 30 07:02:42 crc kubenswrapper[4941]: E1130 07:02:42.708375 4941 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tq5qx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-6546668bfd-zscsf_openstack-operators(cc51d757-c6d0-4fb1-9b26-4cb90ceacc60): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 30 07:02:42 crc kubenswrapper[4941]: E1130 07:02:42.712088 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf" podUID="cc51d757-c6d0-4fb1-9b26-4cb90ceacc60" Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.405068 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-49bzg" event={"ID":"6af3ebbf-c01d-45a6-b88d-f1fa2aa8044e","Type":"ContainerStarted","Data":"559084d6155de65eb73f3851463907824f996baae554ac1efcec4c8766d59f25"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.417984 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp" event={"ID":"dedc405b-7c3e-4df1-afe4-63658d5a92ef","Type":"ContainerStarted","Data":"3553e40c59299b6d9b707401e3b8edfeb200ee54437e54df627a95df911f0e64"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.423886 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5" event={"ID":"17c5c370-aa15-4666-92dc-3ba34847a487","Type":"ContainerStarted","Data":"ab2602bd54fb8e6575e9d9acf9d4c0c589e4725e9f118a5443b67f6863ffa22d"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.437980 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4" event={"ID":"83803d31-cd83-4860-8129-b3b1d717aadd","Type":"ContainerStarted","Data":"7dbdb2674b72ab8bdbc6dc20cd63c75384b14ce8295d3b1f285fc67daa66f735"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.438272 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
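pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4"

The "SyncLoop (probe)" lines appear once a manager container finally starts: the kubelet begins hitting the readiness endpoint on :8081 that these operators expose. Judging from the --health-probe-bind-address=:8081 flag and the /healthz and /readyz probe paths in the container specs above, the operators use the standard controller-runtime health wiring; a minimal sketch of that scaffold (not the operators' actual main.go):

    package main

    import (
        ctrl "sigs.k8s.io/controller-runtime"
        "sigs.k8s.io/controller-runtime/pkg/healthz"
    )

    func main() {
        mgr, err := ctrl.NewManager(ctrl.GetConfigOrDie(), ctrl.Options{
            // Matches --health-probe-bind-address=:8081 in the log.
            HealthProbeBindAddress: ":8081",
        })
        if err != nil {
            panic(err)
        }
        // Serve /healthz and /readyz for the kubelet's HTTP probes.
        if err := mgr.AddHealthzCheck("healthz", healthz.Ping); err != nil {
            panic(err)
        }
        if err := mgr.AddReadyzCheck("readyz", healthz.Ping); err != nil {
            panic(err)
        }
        if err := mgr.Start(ctrl.SetupSignalHandler()); err != nil {
            panic(err)
        }
    }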
pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4" Nov 30 07:02:43 crc kubenswrapper[4941]: E1130 07:02:43.440359 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4" podUID="83803d31-cd83-4860-8129-b3b1d717aadd" Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.442254 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn" event={"ID":"689afda6-cba6-4975-be84-09c20304ae05","Type":"ContainerStarted","Data":"1484299bf2515ebb40b9ddc4a64975e95ff07b7737401793f710fc395b3ddb92"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.445569 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2" event={"ID":"2c71dfd3-27b4-4ec1-9983-2a8351bf8d59","Type":"ContainerStarted","Data":"b4659eb1b0e3338bb4fc8990a0b0e3415e248ac2b76dbfd03b432605af62d1c3"} Nov 30 07:02:43 crc kubenswrapper[4941]: E1130 07:02:43.449824 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2" podUID="2c71dfd3-27b4-4ec1-9983-2a8351bf8d59" Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.452763 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx" event={"ID":"7f3cfd2e-bec9-46f6-9161-92a9b33d38ac","Type":"ContainerStarted","Data":"69b1a79c45be3b893f500948b47d6380ace57d31fa74bdd324541d76d008e043"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.452879 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx" Nov 30 07:02:43 crc kubenswrapper[4941]: E1130 07:02:43.454063 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx" podUID="7f3cfd2e-bec9-46f6-9161-92a9b33d38ac" Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.457053 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw" event={"ID":"a5e74e68-5dfa-41e0-9301-09f3e59450fe","Type":"ContainerStarted","Data":"4991b3663cc4d2010f6991fb2e0ddf1da38fb1d4a083d1b27bab943fcb4bd443"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.469272 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2" event={"ID":"15d00f84-d2c3-445d-b411-3f0bca56234e","Type":"ContainerStarted","Data":"1c39de8caa4dd942d8316344d574487920da908827974c3422a49c4ca1b56165"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.475031 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt" 
event={"ID":"af1b065d-e876-4fd1-b63e-6c5015b7c169","Type":"ContainerStarted","Data":"f527286d75fa3b820c30079b7c8f7031e7c3bcf8a2e5c5e87ccc5016af784dd1"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.479305 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52" event={"ID":"d1af403d-ad9d-464b-b9f8-60a57868b8fb","Type":"ContainerStarted","Data":"29d1937edd5dac8fdb4367415efbe2a226e94f6b96fc7bc91352de2a6ddbfb51"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.482000 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n" event={"ID":"9389b152-75d5-41ce-8638-f531ad93710d","Type":"ContainerStarted","Data":"49662e639cc614c44941c948d0277522fa20d8507f81ccaa1e7eb5016ac8d40f"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.483819 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf" event={"ID":"cc51d757-c6d0-4fb1-9b26-4cb90ceacc60","Type":"ContainerStarted","Data":"c5f065d072d5b240c7b21fdd1a37eedf46253dbc3c1bdc81b2660df42ea7b11b"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.484024 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf" Nov 30 07:02:43 crc kubenswrapper[4941]: E1130 07:02:43.485890 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf" podUID="cc51d757-c6d0-4fb1-9b26-4cb90ceacc60" Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.498486 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r" event={"ID":"e324349c-14cd-4043-9278-783e8faa883e","Type":"ContainerStarted","Data":"3f44ee8bdda9abc986b09098e7c0b79a75aea7925b56711b87df86e7fbf09974"} Nov 30 07:02:43 crc kubenswrapper[4941]: I1130 07:02:43.506417 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg" event={"ID":"5ac344b8-6b4f-45a7-afbd-666188065ed6","Type":"ContainerStarted","Data":"857b2d2619f2e839272eeff59d658fc4059282ecf7ab2fa8a3612f8674401bb7"} Nov 30 07:02:44 crc kubenswrapper[4941]: I1130 07:02:44.518828 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2" Nov 30 07:02:44 crc kubenswrapper[4941]: E1130 07:02:44.521950 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx" podUID="7f3cfd2e-bec9-46f6-9161-92a9b33d38ac" Nov 30 07:02:44 crc kubenswrapper[4941]: E1130 07:02:44.522453 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4" 
podUID="83803d31-cd83-4860-8129-b3b1d717aadd" Nov 30 07:02:44 crc kubenswrapper[4941]: E1130 07:02:44.522450 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2" podUID="2c71dfd3-27b4-4ec1-9983-2a8351bf8d59" Nov 30 07:02:44 crc kubenswrapper[4941]: E1130 07:02:44.522499 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf" podUID="cc51d757-c6d0-4fb1-9b26-4cb90ceacc60" Nov 30 07:02:45 crc kubenswrapper[4941]: E1130 07:02:45.535995 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2" podUID="2c71dfd3-27b4-4ec1-9983-2a8351bf8d59" Nov 30 07:02:45 crc kubenswrapper[4941]: I1130 07:02:45.931077 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert\") pod \"infra-operator-controller-manager-57548d458d-5vh4l\" (UID: \"3e37a585-c770-4472-bf53-4be22b98550a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" Nov 30 07:02:45 crc kubenswrapper[4941]: E1130 07:02:45.931250 4941 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Nov 30 07:02:45 crc kubenswrapper[4941]: E1130 07:02:45.931538 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert podName:3e37a585-c770-4472-bf53-4be22b98550a nodeName:}" failed. No retries permitted until 2025-11-30 07:03:01.931522289 +0000 UTC m=+1002.699693898 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert") pod "infra-operator-controller-manager-57548d458d-5vh4l" (UID: "3e37a585-c770-4472-bf53-4be22b98550a") : secret "infra-operator-webhook-server-cert" not found Nov 30 07:02:46 crc kubenswrapper[4941]: I1130 07:02:46.238654 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446nm7h7\" (UID: \"8be70097-8f2d-4a40-8d1f-57eadb38d1f2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" Nov 30 07:02:46 crc kubenswrapper[4941]: E1130 07:02:46.238886 4941 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 30 07:02:46 crc kubenswrapper[4941]: E1130 07:02:46.238967 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert podName:8be70097-8f2d-4a40-8d1f-57eadb38d1f2 nodeName:}" failed. 
No retries permitted until 2025-11-30 07:03:02.238946341 +0000 UTC m=+1003.007117950 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" (UID: "8be70097-8f2d-4a40-8d1f-57eadb38d1f2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 30 07:02:46 crc kubenswrapper[4941]: I1130 07:02:46.546455 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn" event={"ID":"689afda6-cba6-4975-be84-09c20304ae05","Type":"ContainerStarted","Data":"d0d9f996d95c83ccae6b957d11f7e97c4671a369e6d04f59e4297c630b08df01"} Nov 30 07:02:46 crc kubenswrapper[4941]: I1130 07:02:46.546704 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn" Nov 30 07:02:46 crc kubenswrapper[4941]: I1130 07:02:46.561001 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn" podStartSLOduration=2.87405346 podStartE2EDuration="17.560984538s" podCreationTimestamp="2025-11-30 07:02:29 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.296103073 +0000 UTC m=+972.064274682" lastFinishedPulling="2025-11-30 07:02:45.983034151 +0000 UTC m=+986.751205760" observedRunningTime="2025-11-30 07:02:46.559613264 +0000 UTC m=+987.327784883" watchObservedRunningTime="2025-11-30 07:02:46.560984538 +0000 UTC m=+987.329156147" Nov 30 07:02:46 crc kubenswrapper[4941]: I1130 07:02:46.847698 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" Nov 30 07:02:46 crc kubenswrapper[4941]: I1130 07:02:46.847779 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" Nov 30 07:02:46 crc kubenswrapper[4941]: E1130 07:02:46.847919 4941 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Nov 30 07:02:46 crc kubenswrapper[4941]: E1130 07:02:46.848005 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs podName:43133738-5033-4356-a2d6-7f0a9b78c7f8 nodeName:}" failed. No retries permitted until 2025-11-30 07:03:02.847985284 +0000 UTC m=+1003.616156893 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-nwtqx" (UID: "43133738-5033-4356-a2d6-7f0a9b78c7f8") : secret "metrics-server-cert" not found Nov 30 07:02:46 crc kubenswrapper[4941]: E1130 07:02:46.847940 4941 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Nov 30 07:02:46 crc kubenswrapper[4941]: E1130 07:02:46.848078 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs podName:43133738-5033-4356-a2d6-7f0a9b78c7f8 nodeName:}" failed. No retries permitted until 2025-11-30 07:03:02.848062396 +0000 UTC m=+1003.616233995 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-nwtqx" (UID: "43133738-5033-4356-a2d6-7f0a9b78c7f8") : secret "webhook-server-cert" not found Nov 30 07:02:47 crc kubenswrapper[4941]: I1130 07:02:47.557386 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7gqrn" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.571098 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52" event={"ID":"d1af403d-ad9d-464b-b9f8-60a57868b8fb","Type":"ContainerStarted","Data":"7d966db3a2546b233fdbb6e39bb102f84a15e8b0c11420b24c3cf56fe097c0dd"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.572589 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.573465 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n" event={"ID":"9389b152-75d5-41ce-8638-f531ad93710d","Type":"ContainerStarted","Data":"cd47429e5235dbaf800a9b16a8904c045f3e89007b7becd827a141dca7b00a73"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.574414 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.574526 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.575850 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.581282 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl" event={"ID":"f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec","Type":"ContainerStarted","Data":"0cddbf4622e8ff8b4f0a5fbc81c925829f3efa85a56ea530fda7454bf40beb25"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.584585 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r" 
event={"ID":"e324349c-14cd-4043-9278-783e8faa883e","Type":"ContainerStarted","Data":"d094265c6b8aea7119f2828cec4035e3dcac295d3de973e0e248bc595b9cf632"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.584815 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.586632 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.586871 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt" event={"ID":"af1b065d-e876-4fd1-b63e-6c5015b7c169","Type":"ContainerStarted","Data":"264baaec1347c77d69b5f10c348bc3cffc7190944c278675d5d1d286a52d3b87"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.587294 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.590542 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.594561 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-kwm52" podStartSLOduration=2.784884706 podStartE2EDuration="20.594536212s" podCreationTimestamp="2025-11-30 07:02:29 +0000 UTC" firstStartedPulling="2025-11-30 07:02:30.912578804 +0000 UTC m=+971.680750413" lastFinishedPulling="2025-11-30 07:02:48.72223031 +0000 UTC m=+989.490401919" observedRunningTime="2025-11-30 07:02:49.588651989 +0000 UTC m=+990.356823598" watchObservedRunningTime="2025-11-30 07:02:49.594536212 +0000 UTC m=+990.362707821" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.612061 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7mmgl" podStartSLOduration=2.883628991 podStartE2EDuration="19.612036696s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:02:32.056930598 +0000 UTC m=+972.825102207" lastFinishedPulling="2025-11-30 07:02:48.785338293 +0000 UTC m=+989.553509912" observedRunningTime="2025-11-30 07:02:49.607668561 +0000 UTC m=+990.375840170" watchObservedRunningTime="2025-11-30 07:02:49.612036696 +0000 UTC m=+990.380208305" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.615422 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg" event={"ID":"4f23afd3-e930-4f37-b76f-cb5a6e158796","Type":"ContainerStarted","Data":"c81f15c56c4e049cbeaf40d04e01004285939412057b444dd04efc6f950a758e"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.623635 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp" event={"ID":"dedc405b-7c3e-4df1-afe4-63658d5a92ef","Type":"ContainerStarted","Data":"f8dbc94f87bb3eabc0a1d10dff39ebe124227cf27190e246a972a1a106c8bdae"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.624791 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.631304 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.635753 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w" event={"ID":"1111c4ca-9dea-44f1-b391-e534c8c31476","Type":"ContainerStarted","Data":"44d5dc9e0cb8b930478ea3a904c792a78bdf4e59d90909fcd75a1180e3f4f3e8"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.637900 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw" event={"ID":"a5e74e68-5dfa-41e0-9301-09f3e59450fe","Type":"ContainerStarted","Data":"c0ae18c7c40b45c9c364944536fc577bd5a2d1bf9e22bc931f295e1476d5f3de"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.638821 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.643707 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.644039 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-59m9r" podStartSLOduration=3.518392531 podStartE2EDuration="20.644017251s" podCreationTimestamp="2025-11-30 07:02:29 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.599207961 +0000 UTC m=+972.367379570" lastFinishedPulling="2025-11-30 07:02:48.724832681 +0000 UTC m=+989.493004290" observedRunningTime="2025-11-30 07:02:49.638401967 +0000 UTC m=+990.406573596" watchObservedRunningTime="2025-11-30 07:02:49.644017251 +0000 UTC m=+990.412188860" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.658742 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-fg9bt" podStartSLOduration=3.536904666 podStartE2EDuration="20.658724898s" podCreationTimestamp="2025-11-30 07:02:29 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.602988558 +0000 UTC m=+972.371160167" lastFinishedPulling="2025-11-30 07:02:48.72480879 +0000 UTC m=+989.492980399" observedRunningTime="2025-11-30 07:02:49.655010283 +0000 UTC m=+990.423181892" watchObservedRunningTime="2025-11-30 07:02:49.658724898 +0000 UTC m=+990.426896507" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.676725 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5" event={"ID":"17c5c370-aa15-4666-92dc-3ba34847a487","Type":"ContainerStarted","Data":"12e5496211262e7bd094d55559908902b4c75ceb0ca15394980beec5d600b714"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.677623 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.682839 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.685839 4941 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-vk24n" podStartSLOduration=2.841257749 podStartE2EDuration="20.685813031s" podCreationTimestamp="2025-11-30 07:02:29 +0000 UTC" firstStartedPulling="2025-11-30 07:02:30.940473371 +0000 UTC m=+971.708644980" lastFinishedPulling="2025-11-30 07:02:48.785028653 +0000 UTC m=+989.553200262" observedRunningTime="2025-11-30 07:02:49.678185623 +0000 UTC m=+990.446357252" watchObservedRunningTime="2025-11-30 07:02:49.685813031 +0000 UTC m=+990.453984640" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.695259 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2" event={"ID":"15d00f84-d2c3-445d-b411-3f0bca56234e","Type":"ContainerStarted","Data":"3e3788c9b34fb510677f6f322b3e67231afce6e9e55465e1b9835111f469e10d"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.697449 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.706015 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.724998 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg" event={"ID":"5ac344b8-6b4f-45a7-afbd-666188065ed6","Type":"ContainerStarted","Data":"625fa5e4b4484a8e548a8535bb63e0d697f03be4434d2119ef77709000d7d40e"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.726063 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.727963 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-49bzg" event={"ID":"6af3ebbf-c01d-45a6-b88d-f1fa2aa8044e","Type":"ContainerStarted","Data":"50a03ab37ea649e05e83fc9efc9b95875aba18f23749f4468a7afe4036f27927"} Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.728642 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-49bzg" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.734067 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.735852 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-49bzg" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.742841 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2vldw" podStartSLOduration=3.808784382 podStartE2EDuration="20.742815354s" podCreationTimestamp="2025-11-30 07:02:29 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.301625124 +0000 UTC m=+972.069796743" lastFinishedPulling="2025-11-30 07:02:48.235656106 +0000 UTC m=+989.003827715" observedRunningTime="2025-11-30 07:02:49.732906976 +0000 UTC m=+990.501078585" watchObservedRunningTime="2025-11-30 07:02:49.742815354 
+0000 UTC m=+990.510986963" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.792261 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-g2xq2" podStartSLOduration=3.959485802 podStartE2EDuration="20.792241332s" podCreationTimestamp="2025-11-30 07:02:29 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.879335954 +0000 UTC m=+972.647507563" lastFinishedPulling="2025-11-30 07:02:48.712091484 +0000 UTC m=+989.480263093" observedRunningTime="2025-11-30 07:02:49.770233237 +0000 UTC m=+990.538404846" watchObservedRunningTime="2025-11-30 07:02:49.792241332 +0000 UTC m=+990.560412931" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.816395 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-l6kp5" podStartSLOduration=2.971889286 podStartE2EDuration="19.816375462s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.877727964 +0000 UTC m=+972.645899573" lastFinishedPulling="2025-11-30 07:02:48.72221414 +0000 UTC m=+989.490385749" observedRunningTime="2025-11-30 07:02:49.790622061 +0000 UTC m=+990.558793660" watchObservedRunningTime="2025-11-30 07:02:49.816375462 +0000 UTC m=+990.584547071" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.897271 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-85qhp" podStartSLOduration=5.84789713 podStartE2EDuration="19.897250727s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.953835371 +0000 UTC m=+972.722006980" lastFinishedPulling="2025-11-30 07:02:46.003188978 +0000 UTC m=+986.771360577" observedRunningTime="2025-11-30 07:02:49.894885244 +0000 UTC m=+990.663056853" watchObservedRunningTime="2025-11-30 07:02:49.897250727 +0000 UTC m=+990.665422346" Nov 30 07:02:49 crc kubenswrapper[4941]: I1130 07:02:49.993755 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-49bzg" podStartSLOduration=5.132608162 podStartE2EDuration="19.993734919s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.898233271 +0000 UTC m=+972.666404880" lastFinishedPulling="2025-11-30 07:02:46.759360028 +0000 UTC m=+987.527531637" observedRunningTime="2025-11-30 07:02:49.96292456 +0000 UTC m=+990.731096169" watchObservedRunningTime="2025-11-30 07:02:49.993734919 +0000 UTC m=+990.761906528" Nov 30 07:02:50 crc kubenswrapper[4941]: I1130 07:02:50.025505 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-crskg" podStartSLOduration=6.680504434 podStartE2EDuration="21.025484186s" podCreationTimestamp="2025-11-30 07:02:29 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.690592444 +0000 UTC m=+972.458764043" lastFinishedPulling="2025-11-30 07:02:46.035572186 +0000 UTC m=+986.803743795" observedRunningTime="2025-11-30 07:02:49.997056331 +0000 UTC m=+990.765227930" watchObservedRunningTime="2025-11-30 07:02:50.025484186 +0000 UTC m=+990.793655795" Nov 30 07:02:50 crc kubenswrapper[4941]: I1130 07:02:50.607308 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx" Nov 30 07:02:50 crc 
kubenswrapper[4941]: I1130 07:02:50.657016 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4" Nov 30 07:02:50 crc kubenswrapper[4941]: I1130 07:02:50.737578 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf" Nov 30 07:02:50 crc kubenswrapper[4941]: I1130 07:02:50.765121 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2" Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.744441 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf" event={"ID":"cc51d757-c6d0-4fb1-9b26-4cb90ceacc60","Type":"ContainerStarted","Data":"60a8f9f1e57957108ed9568338806e3d673818fbdb09ac5ad49531cbf99806d1"} Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.747004 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2" event={"ID":"2c71dfd3-27b4-4ec1-9983-2a8351bf8d59","Type":"ContainerStarted","Data":"1e80d299c69af79c77364aa8cc31a64dcd9f1404c7832af221f81b4a5539ec83"} Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.748729 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w" event={"ID":"1111c4ca-9dea-44f1-b391-e534c8c31476","Type":"ContainerStarted","Data":"dc30aaeeea6d16697e3893f86a858ae582c6e53974cf7147d2700b411bb02c9b"} Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.749102 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w" Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.750280 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx" event={"ID":"7f3cfd2e-bec9-46f6-9161-92a9b33d38ac","Type":"ContainerStarted","Data":"1afcb585a80d6fae7e2c5d3132fd652581c01840140d884e315c6710b96414dc"} Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.752277 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4" event={"ID":"83803d31-cd83-4860-8129-b3b1d717aadd","Type":"ContainerStarted","Data":"4491706cada4b01af94ddaeec5343c5246a22c159fc7079023773d81d00d885e"} Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.754917 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg" event={"ID":"4f23afd3-e930-4f37-b76f-cb5a6e158796","Type":"ContainerStarted","Data":"56e142adf3c12c66a0774ccb45042a6daf3065327a01c794327c49423faf9cf2"} Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.755151 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg" Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.756550 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9" event={"ID":"4cd61cf4-a463-445a-83f1-2598f698d53a","Type":"ContainerStarted","Data":"09c8e17c684365b33272d352b885f9f1bbdc1f10bfea18b0b02d780f6937b9b2"} Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.756576 4941 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9" event={"ID":"4cd61cf4-a463-445a-83f1-2598f698d53a","Type":"ContainerStarted","Data":"053c8d0946866aadf50574bc54357346be98c1458204db22ef03b586f0bb87c1"} Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.777109 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-zscsf" podStartSLOduration=11.312838682 podStartE2EDuration="21.777088978s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.713608149 +0000 UTC m=+972.481779758" lastFinishedPulling="2025-11-30 07:02:42.177858445 +0000 UTC m=+982.946030054" observedRunningTime="2025-11-30 07:02:51.760292555 +0000 UTC m=+992.528464164" watchObservedRunningTime="2025-11-30 07:02:51.777088978 +0000 UTC m=+992.545260587" Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.777859 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-kzvdx" podStartSLOduration=11.31764045 podStartE2EDuration="21.777853061s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.696568289 +0000 UTC m=+972.464739898" lastFinishedPulling="2025-11-30 07:02:42.1567809 +0000 UTC m=+982.924952509" observedRunningTime="2025-11-30 07:02:51.775549159 +0000 UTC m=+992.543720768" watchObservedRunningTime="2025-11-30 07:02:51.777853061 +0000 UTC m=+992.546024670" Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.796237 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2ccs4" podStartSLOduration=11.343924227 podStartE2EDuration="21.796212612s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.713711942 +0000 UTC m=+972.481883551" lastFinishedPulling="2025-11-30 07:02:42.166000327 +0000 UTC m=+982.934171936" observedRunningTime="2025-11-30 07:02:51.794934922 +0000 UTC m=+992.563106531" watchObservedRunningTime="2025-11-30 07:02:51.796212612 +0000 UTC m=+992.564384221" Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.819429 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg" podStartSLOduration=5.001442252 podStartE2EDuration="21.819408194s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.906850979 +0000 UTC m=+972.675022588" lastFinishedPulling="2025-11-30 07:02:48.724816911 +0000 UTC m=+989.492988530" observedRunningTime="2025-11-30 07:02:51.817239136 +0000 UTC m=+992.585410745" watchObservedRunningTime="2025-11-30 07:02:51.819408194 +0000 UTC m=+992.587579803" Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.832393 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-gl2n2" podStartSLOduration=11.404304985 podStartE2EDuration="21.832369577s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.732782235 +0000 UTC m=+972.500953844" lastFinishedPulling="2025-11-30 07:02:42.160846837 +0000 UTC m=+982.929018436" observedRunningTime="2025-11-30 07:02:51.829168347 +0000 UTC m=+992.597339956" watchObservedRunningTime="2025-11-30 07:02:51.832369577 +0000 UTC 
m=+992.600541196" Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.855783 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9" podStartSLOduration=3.104940555 podStartE2EDuration="21.855765625s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.962922593 +0000 UTC m=+972.731094202" lastFinishedPulling="2025-11-30 07:02:50.713747663 +0000 UTC m=+991.481919272" observedRunningTime="2025-11-30 07:02:51.853844125 +0000 UTC m=+992.622015734" watchObservedRunningTime="2025-11-30 07:02:51.855765625 +0000 UTC m=+992.623937234" Nov 30 07:02:51 crc kubenswrapper[4941]: I1130 07:02:51.872516 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w" podStartSLOduration=5.136262376 podStartE2EDuration="21.872491865s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.985979681 +0000 UTC m=+972.754151290" lastFinishedPulling="2025-11-30 07:02:48.72220917 +0000 UTC m=+989.490380779" observedRunningTime="2025-11-30 07:02:51.87231265 +0000 UTC m=+992.640484259" watchObservedRunningTime="2025-11-30 07:02:51.872491865 +0000 UTC m=+992.640663474" Nov 30 07:02:52 crc kubenswrapper[4941]: I1130 07:02:52.763645 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4" event={"ID":"3c1ab696-9519-4a69-82c6-4a7078a7472a","Type":"ContainerStarted","Data":"6fe98d786f7a66846faab0a80cae9b540d8b5a48df9a91cfce45e4c330408ca6"} Nov 30 07:02:52 crc kubenswrapper[4941]: I1130 07:02:52.763975 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4" event={"ID":"3c1ab696-9519-4a69-82c6-4a7078a7472a","Type":"ContainerStarted","Data":"36016d06e3daa223bb8ff3aaae23b586f6cb77556c0ac5058307dfbe2a887f20"} Nov 30 07:02:52 crc kubenswrapper[4941]: I1130 07:02:52.764163 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4" Nov 30 07:02:52 crc kubenswrapper[4941]: I1130 07:02:52.779263 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4" podStartSLOduration=2.598889324 podStartE2EDuration="22.779244038s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:02:31.976863707 +0000 UTC m=+972.745035316" lastFinishedPulling="2025-11-30 07:02:52.157218421 +0000 UTC m=+992.925390030" observedRunningTime="2025-11-30 07:02:52.776054629 +0000 UTC m=+993.544226238" watchObservedRunningTime="2025-11-30 07:02:52.779244038 +0000 UTC m=+993.547415647" Nov 30 07:03:00 crc kubenswrapper[4941]: I1130 07:03:00.719037 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-wt4hg" Nov 30 07:03:01 crc kubenswrapper[4941]: I1130 07:03:01.083879 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9" Nov 30 07:03:01 crc kubenswrapper[4941]: I1130 07:03:01.086259 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-kx5z9" Nov 30 07:03:01 crc 
kubenswrapper[4941]: I1130 07:03:01.188427 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d628w" Nov 30 07:03:01 crc kubenswrapper[4941]: I1130 07:03:01.196618 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-898p4" Nov 30 07:03:01 crc kubenswrapper[4941]: I1130 07:03:01.977518 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert\") pod \"infra-operator-controller-manager-57548d458d-5vh4l\" (UID: \"3e37a585-c770-4472-bf53-4be22b98550a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" Nov 30 07:03:01 crc kubenswrapper[4941]: I1130 07:03:01.985768 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3e37a585-c770-4472-bf53-4be22b98550a-cert\") pod \"infra-operator-controller-manager-57548d458d-5vh4l\" (UID: \"3e37a585-c770-4472-bf53-4be22b98550a\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.159934 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.282254 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446nm7h7\" (UID: \"8be70097-8f2d-4a40-8d1f-57eadb38d1f2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.288686 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8be70097-8f2d-4a40-8d1f-57eadb38d1f2-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446nm7h7\" (UID: \"8be70097-8f2d-4a40-8d1f-57eadb38d1f2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.489893 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l"] Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.501191 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.560747 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.789709 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7"] Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.839643 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" event={"ID":"8be70097-8f2d-4a40-8d1f-57eadb38d1f2","Type":"ContainerStarted","Data":"833885e4f7e8df482687d099531558121e883e68bffbe52969559e5ac15a21c1"} Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.841581 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" event={"ID":"3e37a585-c770-4472-bf53-4be22b98550a","Type":"ContainerStarted","Data":"f6f2f8066bc5432b94738ad69174f750f30f3a7157f37b0caf889c35e067c208"} Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.892700 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.892866 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.903240 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.905041 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/43133738-5033-4356-a2d6-7f0a9b78c7f8-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-nwtqx\" (UID: \"43133738-5033-4356-a2d6-7f0a9b78c7f8\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.978690 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:03:02 crc kubenswrapper[4941]: I1130 07:03:02.979989 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 
07:03:03 crc kubenswrapper[4941]: I1130 07:03:03.116510 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" Nov 30 07:03:03 crc kubenswrapper[4941]: W1130 07:03:03.555241 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43133738_5033_4356_a2d6_7f0a9b78c7f8.slice/crio-788beae6b7874468f9701ce3ed60505b599bd985f21aea11bc193d1f415bbdc3 WatchSource:0}: Error finding container 788beae6b7874468f9701ce3ed60505b599bd985f21aea11bc193d1f415bbdc3: Status 404 returned error can't find the container with id 788beae6b7874468f9701ce3ed60505b599bd985f21aea11bc193d1f415bbdc3 Nov 30 07:03:03 crc kubenswrapper[4941]: I1130 07:03:03.559645 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"] Nov 30 07:03:03 crc kubenswrapper[4941]: I1130 07:03:03.856867 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" event={"ID":"43133738-5033-4356-a2d6-7f0a9b78c7f8","Type":"ContainerStarted","Data":"788beae6b7874468f9701ce3ed60505b599bd985f21aea11bc193d1f415bbdc3"} Nov 30 07:03:04 crc kubenswrapper[4941]: I1130 07:03:04.878178 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" event={"ID":"43133738-5033-4356-a2d6-7f0a9b78c7f8","Type":"ContainerStarted","Data":"13bd3f5ac18ff15bc1c2d4919816f537dde467bf63ceaaaa3fae68a14b195017"} Nov 30 07:03:04 crc kubenswrapper[4941]: I1130 07:03:04.878477 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" Nov 30 07:03:06 crc kubenswrapper[4941]: I1130 07:03:06.899353 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" event={"ID":"8be70097-8f2d-4a40-8d1f-57eadb38d1f2","Type":"ContainerStarted","Data":"8ca517b906ee9552e3fc3f7962fe3071e540210c57e4cea460ba83cb4f6d793b"} Nov 30 07:03:06 crc kubenswrapper[4941]: I1130 07:03:06.899612 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" event={"ID":"8be70097-8f2d-4a40-8d1f-57eadb38d1f2","Type":"ContainerStarted","Data":"c4fdd7c29e488033cad4619393cba82dcc45d6d4e4f44c74e5b581d5313e4fb5"} Nov 30 07:03:06 crc kubenswrapper[4941]: I1130 07:03:06.900652 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" Nov 30 07:03:06 crc kubenswrapper[4941]: I1130 07:03:06.908528 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" event={"ID":"3e37a585-c770-4472-bf53-4be22b98550a","Type":"ContainerStarted","Data":"c7defa6dcaa50afcacf83fcc0f07db715803c86b9e9eddfab2e41d0f0df5c7c4"} Nov 30 07:03:06 crc kubenswrapper[4941]: I1130 07:03:06.908582 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" event={"ID":"3e37a585-c770-4472-bf53-4be22b98550a","Type":"ContainerStarted","Data":"07d4345d0bacc742e23f373333a852561f4c828679b6a4554d75f9451f59a613"} Nov 30 07:03:06 crc kubenswrapper[4941]: I1130 
07:03:06.908715 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l"
Nov 30 07:03:06 crc kubenswrapper[4941]: I1130 07:03:06.936396 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx" podStartSLOduration=36.936372966 podStartE2EDuration="36.936372966s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:03:04.918645727 +0000 UTC m=+1005.686817346" watchObservedRunningTime="2025-11-30 07:03:06.936372966 +0000 UTC m=+1007.704544575"
Nov 30 07:03:06 crc kubenswrapper[4941]: I1130 07:03:06.958617 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7" podStartSLOduration=33.329063695 podStartE2EDuration="36.958601317s" podCreationTimestamp="2025-11-30 07:02:30 +0000 UTC" firstStartedPulling="2025-11-30 07:03:02.798906395 +0000 UTC m=+1003.567078004" lastFinishedPulling="2025-11-30 07:03:06.428444017 +0000 UTC m=+1007.196615626" observedRunningTime="2025-11-30 07:03:06.936804879 +0000 UTC m=+1007.704976578" watchObservedRunningTime="2025-11-30 07:03:06.958601317 +0000 UTC m=+1007.726772926"
Nov 30 07:03:06 crc kubenswrapper[4941]: I1130 07:03:06.961313 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l" podStartSLOduration=34.59191998 podStartE2EDuration="37.961304221s" podCreationTimestamp="2025-11-30 07:02:29 +0000 UTC" firstStartedPulling="2025-11-30 07:03:02.500586946 +0000 UTC m=+1003.268758545" lastFinishedPulling="2025-11-30 07:03:05.869971177 +0000 UTC m=+1006.638142786" observedRunningTime="2025-11-30 07:03:06.954692525 +0000 UTC m=+1007.722864144" watchObservedRunningTime="2025-11-30 07:03:06.961304221 +0000 UTC m=+1007.729475840"
Nov 30 07:03:12 crc kubenswrapper[4941]: I1130 07:03:12.171917 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-5vh4l"
Nov 30 07:03:12 crc kubenswrapper[4941]: I1130 07:03:12.567918 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446nm7h7"
Nov 30 07:03:13 crc kubenswrapper[4941]: I1130 07:03:13.123147 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-nwtqx"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.698422 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-557f57d995-lbqcr"]
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.700881 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-lbqcr"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.704576 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.704753 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.709720 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.710053 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-m7vs5"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.722153 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-lbqcr"]
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.763239 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-gzhrt"]
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.765246 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.767247 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.783385 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-gzhrt"]
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.881986 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8spn\" (UniqueName: \"kubernetes.io/projected/0edc204b-0866-4759-a9db-437c7c0cced6-kube-api-access-j8spn\") pod \"dnsmasq-dns-766fdc659c-gzhrt\" (UID: \"0edc204b-0866-4759-a9db-437c7c0cced6\") " pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.882046 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1684d17-ee22-47ab-8561-f0c069d8b99a-config\") pod \"dnsmasq-dns-557f57d995-lbqcr\" (UID: \"b1684d17-ee22-47ab-8561-f0c069d8b99a\") " pod="openstack/dnsmasq-dns-557f57d995-lbqcr"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.882104 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0edc204b-0866-4759-a9db-437c7c0cced6-config\") pod \"dnsmasq-dns-766fdc659c-gzhrt\" (UID: \"0edc204b-0866-4759-a9db-437c7c0cced6\") " pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.882137 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0edc204b-0866-4759-a9db-437c7c0cced6-dns-svc\") pod \"dnsmasq-dns-766fdc659c-gzhrt\" (UID: \"0edc204b-0866-4759-a9db-437c7c0cced6\") " pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.882164 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7clc\" (UniqueName: \"kubernetes.io/projected/b1684d17-ee22-47ab-8561-f0c069d8b99a-kube-api-access-g7clc\") pod \"dnsmasq-dns-557f57d995-lbqcr\" (UID: \"b1684d17-ee22-47ab-8561-f0c069d8b99a\") " pod="openstack/dnsmasq-dns-557f57d995-lbqcr"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.983205 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0edc204b-0866-4759-a9db-437c7c0cced6-config\") pod \"dnsmasq-dns-766fdc659c-gzhrt\" (UID: \"0edc204b-0866-4759-a9db-437c7c0cced6\") " pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.983268 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0edc204b-0866-4759-a9db-437c7c0cced6-dns-svc\") pod \"dnsmasq-dns-766fdc659c-gzhrt\" (UID: \"0edc204b-0866-4759-a9db-437c7c0cced6\") " pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.983300 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7clc\" (UniqueName: \"kubernetes.io/projected/b1684d17-ee22-47ab-8561-f0c069d8b99a-kube-api-access-g7clc\") pod \"dnsmasq-dns-557f57d995-lbqcr\" (UID: \"b1684d17-ee22-47ab-8561-f0c069d8b99a\") " pod="openstack/dnsmasq-dns-557f57d995-lbqcr"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.983355 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8spn\" (UniqueName: \"kubernetes.io/projected/0edc204b-0866-4759-a9db-437c7c0cced6-kube-api-access-j8spn\") pod \"dnsmasq-dns-766fdc659c-gzhrt\" (UID: \"0edc204b-0866-4759-a9db-437c7c0cced6\") " pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.983397 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1684d17-ee22-47ab-8561-f0c069d8b99a-config\") pod \"dnsmasq-dns-557f57d995-lbqcr\" (UID: \"b1684d17-ee22-47ab-8561-f0c069d8b99a\") " pod="openstack/dnsmasq-dns-557f57d995-lbqcr"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.984447 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0edc204b-0866-4759-a9db-437c7c0cced6-dns-svc\") pod \"dnsmasq-dns-766fdc659c-gzhrt\" (UID: \"0edc204b-0866-4759-a9db-437c7c0cced6\") " pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.984457 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1684d17-ee22-47ab-8561-f0c069d8b99a-config\") pod \"dnsmasq-dns-557f57d995-lbqcr\" (UID: \"b1684d17-ee22-47ab-8561-f0c069d8b99a\") " pod="openstack/dnsmasq-dns-557f57d995-lbqcr"
Nov 30 07:03:30 crc kubenswrapper[4941]: I1130 07:03:30.984963 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0edc204b-0866-4759-a9db-437c7c0cced6-config\") pod \"dnsmasq-dns-766fdc659c-gzhrt\" (UID: \"0edc204b-0866-4759-a9db-437c7c0cced6\") " pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.007714 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7clc\" (UniqueName: \"kubernetes.io/projected/b1684d17-ee22-47ab-8561-f0c069d8b99a-kube-api-access-g7clc\") pod \"dnsmasq-dns-557f57d995-lbqcr\" (UID: \"b1684d17-ee22-47ab-8561-f0c069d8b99a\") " pod="openstack/dnsmasq-dns-557f57d995-lbqcr"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.015419 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8spn\" (UniqueName: \"kubernetes.io/projected/0edc204b-0866-4759-a9db-437c7c0cced6-kube-api-access-j8spn\") pod \"dnsmasq-dns-766fdc659c-gzhrt\" (UID: \"0edc204b-0866-4759-a9db-437c7c0cced6\") " pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.021426 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-lbqcr"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.084313 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.313683 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-lbqcr"]
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.344228 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5cd665b7c7-nbnlx"]
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.345558 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.363759 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cd665b7c7-nbnlx"]
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.490865 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf97b\" (UniqueName: \"kubernetes.io/projected/75700cb8-83c7-405b-b8a2-dc133534ef90-kube-api-access-lf97b\") pod \"dnsmasq-dns-5cd665b7c7-nbnlx\" (UID: \"75700cb8-83c7-405b-b8a2-dc133534ef90\") " pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.490965 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75700cb8-83c7-405b-b8a2-dc133534ef90-dns-svc\") pod \"dnsmasq-dns-5cd665b7c7-nbnlx\" (UID: \"75700cb8-83c7-405b-b8a2-dc133534ef90\") " pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.490990 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75700cb8-83c7-405b-b8a2-dc133534ef90-config\") pod \"dnsmasq-dns-5cd665b7c7-nbnlx\" (UID: \"75700cb8-83c7-405b-b8a2-dc133534ef90\") " pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.592982 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75700cb8-83c7-405b-b8a2-dc133534ef90-dns-svc\") pod \"dnsmasq-dns-5cd665b7c7-nbnlx\" (UID: \"75700cb8-83c7-405b-b8a2-dc133534ef90\") " pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.593056 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75700cb8-83c7-405b-b8a2-dc133534ef90-config\") pod \"dnsmasq-dns-5cd665b7c7-nbnlx\" (UID: \"75700cb8-83c7-405b-b8a2-dc133534ef90\") " pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.593123 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf97b\" (UniqueName: \"kubernetes.io/projected/75700cb8-83c7-405b-b8a2-dc133534ef90-kube-api-access-lf97b\") pod \"dnsmasq-dns-5cd665b7c7-nbnlx\" (UID: \"75700cb8-83c7-405b-b8a2-dc133534ef90\") " pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.594273 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75700cb8-83c7-405b-b8a2-dc133534ef90-config\") pod \"dnsmasq-dns-5cd665b7c7-nbnlx\" (UID: \"75700cb8-83c7-405b-b8a2-dc133534ef90\") " pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.594354 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75700cb8-83c7-405b-b8a2-dc133534ef90-dns-svc\") pod \"dnsmasq-dns-5cd665b7c7-nbnlx\" (UID: \"75700cb8-83c7-405b-b8a2-dc133534ef90\") " pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.614341 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-gzhrt"]
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.622804 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf97b\" (UniqueName: \"kubernetes.io/projected/75700cb8-83c7-405b-b8a2-dc133534ef90-kube-api-access-lf97b\") pod \"dnsmasq-dns-5cd665b7c7-nbnlx\" (UID: \"75700cb8-83c7-405b-b8a2-dc133534ef90\") " pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.652229 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-lbqcr"]
Nov 30 07:03:31 crc kubenswrapper[4941]: W1130 07:03:31.656027 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1684d17_ee22_47ab_8561_f0c069d8b99a.slice/crio-01e7471704d4221f72b552f5356905cd2a423fac3cfaa4950b3f639896f24da4 WatchSource:0}: Error finding container 01e7471704d4221f72b552f5356905cd2a423fac3cfaa4950b3f639896f24da4: Status 404 returned error can't find the container with id 01e7471704d4221f72b552f5356905cd2a423fac3cfaa4950b3f639896f24da4
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.690923 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx"
Nov 30 07:03:31 crc kubenswrapper[4941]: I1130 07:03:31.980812 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-gzhrt"]
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.021748 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-fcvvs"]
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.022991 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.027288 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-fcvvs"]
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.109713 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cd665b7c7-nbnlx"]
Nov 30 07:03:32 crc kubenswrapper[4941]: W1130 07:03:32.127457 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75700cb8_83c7_405b_b8a2_dc133534ef90.slice/crio-051559ab70fe5c3e88d6e1ec62671d4088bf55f3dfc5b592f0e63c679967de85 WatchSource:0}: Error finding container 051559ab70fe5c3e88d6e1ec62671d4088bf55f3dfc5b592f0e63c679967de85: Status 404 returned error can't find the container with id 051559ab70fe5c3e88d6e1ec62671d4088bf55f3dfc5b592f0e63c679967de85
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.198146 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx" event={"ID":"75700cb8-83c7-405b-b8a2-dc133534ef90","Type":"ContainerStarted","Data":"051559ab70fe5c3e88d6e1ec62671d4088bf55f3dfc5b592f0e63c679967de85"}
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.201531 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557f57d995-lbqcr" event={"ID":"b1684d17-ee22-47ab-8561-f0c069d8b99a","Type":"ContainerStarted","Data":"01e7471704d4221f72b552f5356905cd2a423fac3cfaa4950b3f639896f24da4"}
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.203569 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-766fdc659c-gzhrt" event={"ID":"0edc204b-0866-4759-a9db-437c7c0cced6","Type":"ContainerStarted","Data":"b6a68b2d63ee94c1b33c5aa3e31e7df82779c2ad96a98a1f0c4e4aee326884a6"}
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.213433 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgscv\" (UniqueName: \"kubernetes.io/projected/d850f0b1-322a-43e6-a98a-fe18676ada8a-kube-api-access-xgscv\") pod \"dnsmasq-dns-8446fd7c75-fcvvs\" (UID: \"d850f0b1-322a-43e6-a98a-fe18676ada8a\") " pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.213482 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d850f0b1-322a-43e6-a98a-fe18676ada8a-dns-svc\") pod \"dnsmasq-dns-8446fd7c75-fcvvs\" (UID: \"d850f0b1-322a-43e6-a98a-fe18676ada8a\") " pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.213513 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d850f0b1-322a-43e6-a98a-fe18676ada8a-config\") pod \"dnsmasq-dns-8446fd7c75-fcvvs\" (UID: \"d850f0b1-322a-43e6-a98a-fe18676ada8a\") " pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.314638 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgscv\" (UniqueName: \"kubernetes.io/projected/d850f0b1-322a-43e6-a98a-fe18676ada8a-kube-api-access-xgscv\") pod \"dnsmasq-dns-8446fd7c75-fcvvs\" (UID: \"d850f0b1-322a-43e6-a98a-fe18676ada8a\") " pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.314735 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d850f0b1-322a-43e6-a98a-fe18676ada8a-dns-svc\") pod \"dnsmasq-dns-8446fd7c75-fcvvs\" (UID: \"d850f0b1-322a-43e6-a98a-fe18676ada8a\") " pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.314769 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d850f0b1-322a-43e6-a98a-fe18676ada8a-config\") pod \"dnsmasq-dns-8446fd7c75-fcvvs\" (UID: \"d850f0b1-322a-43e6-a98a-fe18676ada8a\") " pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.315769 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d850f0b1-322a-43e6-a98a-fe18676ada8a-config\") pod \"dnsmasq-dns-8446fd7c75-fcvvs\" (UID: \"d850f0b1-322a-43e6-a98a-fe18676ada8a\") " pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.316185 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d850f0b1-322a-43e6-a98a-fe18676ada8a-dns-svc\") pod \"dnsmasq-dns-8446fd7c75-fcvvs\" (UID: \"d850f0b1-322a-43e6-a98a-fe18676ada8a\") " pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.341622 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgscv\" (UniqueName: \"kubernetes.io/projected/d850f0b1-322a-43e6-a98a-fe18676ada8a-kube-api-access-xgscv\") pod \"dnsmasq-dns-8446fd7c75-fcvvs\" (UID: \"d850f0b1-322a-43e6-a98a-fe18676ada8a\") " pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.363029 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.543358 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.545163 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.549941 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.550404 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-z98tr"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.550582 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.550825 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.550959 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.551581 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.551608 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.551669 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.628030 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.628422 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.628485 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.628518 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.628549 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/45978317-0f07-44da-8b74-fbaaec0e6105-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.628905 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.628957 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.629002 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.629057 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.629081 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjvtf\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-kube-api-access-gjvtf\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.629109 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/45978317-0f07-44da-8b74-fbaaec0e6105-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.731010 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.731062 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.731084 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.731104 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/45978317-0f07-44da-8b74-fbaaec0e6105-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.731164 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.731245 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.731273 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.731302 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.731318 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjvtf\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-kube-api-access-gjvtf\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.732286 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/45978317-0f07-44da-8b74-fbaaec0e6105-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.732354 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.732473 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.734800 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.735388 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.736301 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.736769 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.737071 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.740069 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.740645 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/45978317-0f07-44da-8b74-fbaaec0e6105-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.742016 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.744553 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/45978317-0f07-44da-8b74-fbaaec0e6105-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.754557 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjvtf\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-kube-api-access-gjvtf\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.764428 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.872814 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.895126 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-fcvvs"]
Nov 30 07:03:32 crc kubenswrapper[4941]: W1130 07:03:32.944622 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd850f0b1_322a_43e6_a98a_fe18676ada8a.slice/crio-c1633595662d73ceeccdc2145e380a1d6efc3a2ba4b4f4e37dbc249f9b08c51d WatchSource:0}: Error finding container c1633595662d73ceeccdc2145e380a1d6efc3a2ba4b4f4e37dbc249f9b08c51d: Status 404 returned error can't find the container with id c1633595662d73ceeccdc2145e380a1d6efc3a2ba4b4f4e37dbc249f9b08c51d
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.978202 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 07:03:32 crc kubenswrapper[4941]: I1130 07:03:32.978255 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.180198 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.181671 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.185083 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.185355 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.186040 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-bw4md"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.186215 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.186356 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.186469 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.186578 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.207638 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.216983 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs" event={"ID":"d850f0b1-322a-43e6-a98a-fe18676ada8a","Type":"ContainerStarted","Data":"c1633595662d73ceeccdc2145e380a1d6efc3a2ba4b4f4e37dbc249f9b08c51d"}
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.343538 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.343604 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.343790 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.343892 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.343923 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.343969 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.343993 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.344029 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.344146 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.344182 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvjw9\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-kube-api-access-dvjw9\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.344204 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.445229 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.445280 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.445298 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.445339 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.445357 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvjw9\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-kube-api-access-dvjw9\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.445374 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.445399 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.445417 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.445455 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.445485 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.445501 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.446445 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.447708 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.447948 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.448650 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.449103 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.449857 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.457846 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.458141 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.469057 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.475269 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.479482 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvjw9\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-kube-api-access-dvjw9\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.495899 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.521138 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:03:33 crc kubenswrapper[4941]: W1130 07:03:33.530739 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod45978317_0f07_44da_8b74_fbaaec0e6105.slice/crio-2c33db5f06c6093a3b0ef71abd136c7c7d058f6546b2a67cf83e619aa974ef2b WatchSource:0}: Error finding container 2c33db5f06c6093a3b0ef71abd136c7c7d058f6546b2a67cf83e619aa974ef2b: Status 404 returned error can't find the container with id 2c33db5f06c6093a3b0ef71abd136c7c7d058f6546b2a67cf83e619aa974ef2b
Nov 30 07:03:33 crc kubenswrapper[4941]: I1130 07:03:33.808477 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.256495 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"45978317-0f07-44da-8b74-fbaaec0e6105","Type":"ContainerStarted","Data":"2c33db5f06c6093a3b0ef71abd136c7c7d058f6546b2a67cf83e619aa974ef2b"}
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.430338 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.554600 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.556893 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.560369 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-48dx7"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.560600 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.564170 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.576069 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.581249 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.581985 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.667375 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/79ca3b29-7cdd-4923-a12c-2f350d4b8728-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.667437 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79ca3b29-7cdd-4923-a12c-2f350d4b8728-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.667456 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-operator-scripts\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.667542 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79ca3b29-7cdd-4923-a12c-2f350d4b8728-config-data-generated\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.667570 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.667642 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-config-data-default\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.667690 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5hsc\" (UniqueName: \"kubernetes.io/projected/79ca3b29-7cdd-4923-a12c-2f350d4b8728-kube-api-access-s5hsc\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.667732 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-kolla-config\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.771288 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-operator-scripts\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.771373 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79ca3b29-7cdd-4923-a12c-2f350d4b8728-config-data-generated\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.771404 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.771432 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-config-data-default\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.771455 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5hsc\" (UniqueName: \"kubernetes.io/projected/79ca3b29-7cdd-4923-a12c-2f350d4b8728-kube-api-access-s5hsc\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.771477 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-kolla-config\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.771519 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/79ca3b29-7cdd-4923-a12c-2f350d4b8728-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.771539 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79ca3b29-7cdd-4923-a12c-2f350d4b8728-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.772798 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-operator-scripts\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.773438 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-config-data-default\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.773660 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79ca3b29-7cdd-4923-a12c-2f350d4b8728-config-data-generated\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.774038 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-kolla-config\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.774729 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.778460 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/79ca3b29-7cdd-4923-a12c-2f350d4b8728-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.778810 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79ca3b29-7cdd-4923-a12c-2f350d4b8728-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.795560 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5hsc\" (UniqueName: \"kubernetes.io/projected/79ca3b29-7cdd-4923-a12c-2f350d4b8728-kube-api-access-s5hsc\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.816969 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " pod="openstack/openstack-galera-0"
Nov 30 07:03:34 crc kubenswrapper[4941]: I1130 07:03:34.897446 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Nov 30 07:03:35 crc kubenswrapper[4941]: I1130 07:03:35.344504 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4e7a5ee5-1f0c-4819-a375-891a5e2cea03","Type":"ContainerStarted","Data":"9fdd8964b5683e2ba4930996421236d204bc14b89d58477a6e06575f555eeda0"}
Nov 30 07:03:35 crc kubenswrapper[4941]: I1130 07:03:35.507557 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Nov 30 07:03:35 crc kubenswrapper[4941]: W1130 07:03:35.560317 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79ca3b29_7cdd_4923_a12c_2f350d4b8728.slice/crio-cd51c8555a2f56784fbfd3441828d11ee64a0a6d043a5f7298976d7819d57012 WatchSource:0}: Error finding container cd51c8555a2f56784fbfd3441828d11ee64a0a6d043a5f7298976d7819d57012: Status 404 returned error can't find the container with id cd51c8555a2f56784fbfd3441828d11ee64a0a6d043a5f7298976d7819d57012
Nov 30 07:03:35 crc kubenswrapper[4941]: I1130 07:03:35.958056 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Nov 30 07:03:35 crc kubenswrapper[4941]: I1130 07:03:35.960107 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:35 crc kubenswrapper[4941]: I1130 07:03:35.967774 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-cbdrk"
Nov 30 07:03:35 crc kubenswrapper[4941]: I1130 07:03:35.967994 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Nov 30 07:03:35 crc kubenswrapper[4941]: I1130 07:03:35.968166 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Nov 30 07:03:35 crc kubenswrapper[4941]: I1130 07:03:35.968264 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Nov 30 07:03:35 crc kubenswrapper[4941]: I1130 07:03:35.975970 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.014991 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.015059 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.015343 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv99r\" (UniqueName: \"kubernetes.io/projected/fa90de41-9166-475c-925a-3d79b02a694d-kube-api-access-fv99r\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.015520 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa90de41-9166-475c-925a-3d79b02a694d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.015599 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa90de41-9166-475c-925a-3d79b02a694d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.015671 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa90de41-9166-475c-925a-3d79b02a694d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.015693 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.015777 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.117476 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv99r\" (UniqueName: \"kubernetes.io/projected/fa90de41-9166-475c-925a-3d79b02a694d-kube-api-access-fv99r\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.117526 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa90de41-9166-475c-925a-3d79b02a694d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.117553 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa90de41-9166-475c-925a-3d79b02a694d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.117579 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa90de41-9166-475c-925a-3d79b02a694d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.117600 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.117634 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.117676 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.117699 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod
\"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.117983 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa90de41-9166-475c-925a-3d79b02a694d-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.118026 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-cell1-galera-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.118783 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.119045 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.122003 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.124391 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa90de41-9166-475c-925a-3d79b02a694d-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.136816 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa90de41-9166-475c-925a-3d79b02a694d-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.137746 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv99r\" (UniqueName: \"kubernetes.io/projected/fa90de41-9166-475c-925a-3d79b02a694d-kube-api-access-fv99r\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.144784 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " pod="openstack/openstack-cell1-galera-0" Nov 30 07:03:36 crc 
kubenswrapper[4941]: I1130 07:03:36.294589 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.396196 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"79ca3b29-7cdd-4923-a12c-2f350d4b8728","Type":"ContainerStarted","Data":"cd51c8555a2f56784fbfd3441828d11ee64a0a6d043a5f7298976d7819d57012"} Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.811617 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.812644 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.816724 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-drcv5" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.816977 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.817613 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.853979 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.947216 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05c5cf3-bcb4-4307-a601-fbecde4f026b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.947286 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c05c5cf3-bcb4-4307-a601-fbecde4f026b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.947312 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c05c5cf3-bcb4-4307-a601-fbecde4f026b-config-data\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.947345 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c05c5cf3-bcb4-4307-a601-fbecde4f026b-kolla-config\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.947364 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lx8gl\" (UniqueName: \"kubernetes.io/projected/c05c5cf3-bcb4-4307-a601-fbecde4f026b-kube-api-access-lx8gl\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:36 crc kubenswrapper[4941]: I1130 07:03:36.952609 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.049222 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05c5cf3-bcb4-4307-a601-fbecde4f026b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.049340 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c05c5cf3-bcb4-4307-a601-fbecde4f026b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.049379 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c05c5cf3-bcb4-4307-a601-fbecde4f026b-config-data\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.049421 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c05c5cf3-bcb4-4307-a601-fbecde4f026b-kolla-config\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.049447 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx8gl\" (UniqueName: \"kubernetes.io/projected/c05c5cf3-bcb4-4307-a601-fbecde4f026b-kube-api-access-lx8gl\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.050823 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c05c5cf3-bcb4-4307-a601-fbecde4f026b-config-data\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.051431 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c05c5cf3-bcb4-4307-a601-fbecde4f026b-kolla-config\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.062263 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05c5cf3-bcb4-4307-a601-fbecde4f026b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.064547 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c05c5cf3-bcb4-4307-a601-fbecde4f026b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.079834 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx8gl\" (UniqueName: \"kubernetes.io/projected/c05c5cf3-bcb4-4307-a601-fbecde4f026b-kube-api-access-lx8gl\") pod \"memcached-0\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " pod="openstack/memcached-0" Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.137488 4941 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.453255 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fa90de41-9166-475c-925a-3d79b02a694d","Type":"ContainerStarted","Data":"c69da793530b1efdff4181dc1a1be7c3f55323899c0dec0d9d589dec59dd1e79"} Nov 30 07:03:37 crc kubenswrapper[4941]: I1130 07:03:37.784300 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 30 07:03:38 crc kubenswrapper[4941]: I1130 07:03:38.478151 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c05c5cf3-bcb4-4307-a601-fbecde4f026b","Type":"ContainerStarted","Data":"7cd3a8f355128932455394c1f51e66da3f431efc5a1a14840d4cc183dcb61623"} Nov 30 07:03:38 crc kubenswrapper[4941]: I1130 07:03:38.625403 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 30 07:03:38 crc kubenswrapper[4941]: I1130 07:03:38.627002 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 30 07:03:38 crc kubenswrapper[4941]: I1130 07:03:38.634617 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-jk6db" Nov 30 07:03:38 crc kubenswrapper[4941]: I1130 07:03:38.656809 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 30 07:03:38 crc kubenswrapper[4941]: I1130 07:03:38.680118 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w2kc\" (UniqueName: \"kubernetes.io/projected/ab69a64e-2e8a-4ee3-992a-50726935c90c-kube-api-access-4w2kc\") pod \"kube-state-metrics-0\" (UID: \"ab69a64e-2e8a-4ee3-992a-50726935c90c\") " pod="openstack/kube-state-metrics-0" Nov 30 07:03:38 crc kubenswrapper[4941]: I1130 07:03:38.781746 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w2kc\" (UniqueName: \"kubernetes.io/projected/ab69a64e-2e8a-4ee3-992a-50726935c90c-kube-api-access-4w2kc\") pod \"kube-state-metrics-0\" (UID: \"ab69a64e-2e8a-4ee3-992a-50726935c90c\") " pod="openstack/kube-state-metrics-0" Nov 30 07:03:38 crc kubenswrapper[4941]: I1130 07:03:38.828704 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w2kc\" (UniqueName: \"kubernetes.io/projected/ab69a64e-2e8a-4ee3-992a-50726935c90c-kube-api-access-4w2kc\") pod \"kube-state-metrics-0\" (UID: \"ab69a64e-2e8a-4ee3-992a-50726935c90c\") " pod="openstack/kube-state-metrics-0" Nov 30 07:03:38 crc kubenswrapper[4941]: I1130 07:03:38.976968 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.631762 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-zcbz9"] Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.637701 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.641354 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.641607 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-5p7wd" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.641740 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.643798 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zcbz9"] Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.683215 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-slfpx"] Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.686052 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.697794 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-slfpx"] Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.734913 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-run\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.734950 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-log-ovn\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.734972 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flxgc\" (UniqueName: \"kubernetes.io/projected/af96ea99-9953-4e58-8ecc-0999730fcaf9-kube-api-access-flxgc\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.734995 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e01076ff-d267-4931-8788-47eee9ebfd76-scripts\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.735067 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqfqm\" (UniqueName: \"kubernetes.io/projected/e01076ff-d267-4931-8788-47eee9ebfd76-kube-api-access-cqfqm\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.735136 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-log\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " 
pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.735155 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01076ff-d267-4931-8788-47eee9ebfd76-combined-ca-bundle\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.735195 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af96ea99-9953-4e58-8ecc-0999730fcaf9-scripts\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.735211 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-run\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.735391 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-run-ovn\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.735670 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-etc-ovs\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.735784 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e01076ff-d267-4931-8788-47eee9ebfd76-ovn-controller-tls-certs\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.735857 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-lib\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.837379 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-lib\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.837700 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-run\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 
07:03:41.837725 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-log-ovn\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.837748 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flxgc\" (UniqueName: \"kubernetes.io/projected/af96ea99-9953-4e58-8ecc-0999730fcaf9-kube-api-access-flxgc\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.837814 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e01076ff-d267-4931-8788-47eee9ebfd76-scripts\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.838589 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqfqm\" (UniqueName: \"kubernetes.io/projected/e01076ff-d267-4931-8788-47eee9ebfd76-kube-api-access-cqfqm\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.838641 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-log\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.838210 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-log-ovn\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.838682 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01076ff-d267-4931-8788-47eee9ebfd76-combined-ca-bundle\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.838141 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-lib\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.838353 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-run\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.838989 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-log\") pod \"ovn-controller-ovs-slfpx\" (UID: 
\"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.839094 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af96ea99-9953-4e58-8ecc-0999730fcaf9-scripts\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.839121 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-run\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.839146 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-run-ovn\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.839210 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-etc-ovs\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.839290 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e01076ff-d267-4931-8788-47eee9ebfd76-ovn-controller-tls-certs\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.839935 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-run\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.840070 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-run-ovn\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.840241 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-etc-ovs\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.841122 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e01076ff-d267-4931-8788-47eee9ebfd76-scripts\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.842495 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/af96ea99-9953-4e58-8ecc-0999730fcaf9-scripts\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.850049 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01076ff-d267-4931-8788-47eee9ebfd76-combined-ca-bundle\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.851719 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e01076ff-d267-4931-8788-47eee9ebfd76-ovn-controller-tls-certs\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.855790 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqfqm\" (UniqueName: \"kubernetes.io/projected/e01076ff-d267-4931-8788-47eee9ebfd76-kube-api-access-cqfqm\") pod \"ovn-controller-zcbz9\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.858082 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flxgc\" (UniqueName: \"kubernetes.io/projected/af96ea99-9953-4e58-8ecc-0999730fcaf9-kube-api-access-flxgc\") pod \"ovn-controller-ovs-slfpx\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:41 crc kubenswrapper[4941]: I1130 07:03:41.957590 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zcbz9" Nov 30 07:03:42 crc kubenswrapper[4941]: I1130 07:03:42.007228 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.597548 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.605189 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.612069 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.612400 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.612567 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-5kx9v" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.612717 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.612845 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.615695 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.693976 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.694246 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.694351 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxgcq\" (UniqueName: \"kubernetes.io/projected/14ce638b-1621-451a-80b6-0e13b6ffb734-kube-api-access-wxgcq\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.694385 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.694510 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/14ce638b-1621-451a-80b6-0e13b6ffb734-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.694652 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14ce638b-1621-451a-80b6-0e13b6ffb734-config\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.694731 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.694848 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14ce638b-1621-451a-80b6-0e13b6ffb734-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.796752 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.797138 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxgcq\" (UniqueName: \"kubernetes.io/projected/14ce638b-1621-451a-80b6-0e13b6ffb734-kube-api-access-wxgcq\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.797296 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.797531 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/14ce638b-1621-451a-80b6-0e13b6ffb734-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.797713 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14ce638b-1621-451a-80b6-0e13b6ffb734-config\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.797191 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.797927 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/14ce638b-1621-451a-80b6-0e13b6ffb734-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.798073 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc 
kubenswrapper[4941]: I1130 07:03:44.798392 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14ce638b-1621-451a-80b6-0e13b6ffb734-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.798442 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.798622 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14ce638b-1621-451a-80b6-0e13b6ffb734-config\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.799416 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14ce638b-1621-451a-80b6-0e13b6ffb734-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.804260 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.804308 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.809157 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.818816 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.820941 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxgcq\" (UniqueName: \"kubernetes.io/projected/14ce638b-1621-451a-80b6-0e13b6ffb734-kube-api-access-wxgcq\") pod \"ovsdbserver-sb-0\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:44 crc kubenswrapper[4941]: I1130 07:03:44.930546 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.769988 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.772588 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.774621 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-kw4jv" Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.775703 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.775757 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.780826 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.788201 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.931218 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-config\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0" Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.932507 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0" Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.932983 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvcb8\" (UniqueName: \"kubernetes.io/projected/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-kube-api-access-mvcb8\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0" Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.933256 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0" Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.933588 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0" Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.934049 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0" Nov 30 07:03:45 crc 
kubenswrapper[4941]: I1130 07:03:45.936188 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:45 crc kubenswrapper[4941]: I1130 07:03:45.936687 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.038801 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-config\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.040720 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.040762 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvcb8\" (UniqueName: \"kubernetes.io/projected/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-kube-api-access-mvcb8\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.040793 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.041010 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.041158 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.041208 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.041288 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.041846 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.042123 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.042117 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-config\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.044052 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.045145 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.048229 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.049107 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.061215 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvcb8\" (UniqueName: \"kubernetes.io/projected/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-kube-api-access-mvcb8\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.064171 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 07:03:46 crc kubenswrapper[4941]: I1130 07:03:46.099065 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Nov 30 07:04:00 crc kubenswrapper[4941]: E1130 07:04:00.129550 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52"
Nov 30 07:04:00 crc kubenswrapper[4941]: E1130 07:04:00.130266 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fv99r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(fa90de41-9166-475c-925a-3d79b02a694d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Nov 30 07:04:00 crc kubenswrapper[4941]: E1130 07:04:00.131610 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="fa90de41-9166-475c-925a-3d79b02a694d"
Nov 30 07:04:00 crc kubenswrapper[4941]: E1130 07:04:00.930259 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:5526be2fd8d8cdc035078fdbcb7de6b02c081147295a13f2b1e50e281ef17f52\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="fa90de41-9166-475c-925a-3d79b02a694d"
Nov 30 07:04:02 crc kubenswrapper[4941]: I1130 07:04:02.978450 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 07:04:02 crc kubenswrapper[4941]: I1130 07:04:02.978844 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 07:04:02 crc kubenswrapper[4941]: I1130 07:04:02.978917 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 07:04:02 crc kubenswrapper[4941]: I1130 07:04:02.979937 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c56f9f6e36d888e40d67920be6ce9775fda0a6740b4e0f2b802e64b1e8fb285b"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 07:04:02 crc kubenswrapper[4941]: I1130 07:04:02.980030 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://c56f9f6e36d888e40d67920be6ce9775fda0a6740b4e0f2b802e64b1e8fb285b" gracePeriod=600
Nov 30 07:04:03 crc kubenswrapper[4941]: I1130 07:04:03.958366 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="c56f9f6e36d888e40d67920be6ce9775fda0a6740b4e0f2b802e64b1e8fb285b" exitCode=0
Nov 30 07:04:03 crc kubenswrapper[4941]: I1130 07:04:03.958419 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"c56f9f6e36d888e40d67920be6ce9775fda0a6740b4e0f2b802e64b1e8fb285b"}
Nov 30 07:04:03 crc kubenswrapper[4941]: I1130 07:04:03.958514 4941 scope.go:117] "RemoveContainer" containerID="88a82781e74dc63c736752840ee31da64c053c5a7d4b1a678036abaa19f971dc"
Nov 30 07:04:04 crc kubenswrapper[4941]: E1130 07:04:04.536418 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached@sha256:0e00f2303db35259ffcd3d034f38ab9eb4cb089e268305a4165b5f86a18fce6c"
Nov 30 07:04:04 crc kubenswrapper[4941]: E1130 07:04:04.536993 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached@sha256:0e00f2303db35259ffcd3d034f38ab9eb4cb089e268305a4165b5f86a18fce6c,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n656h699h87h66fhb7hf5h5b9h54bhd6h7chb5h55bh7bh5f7h5dh558h5fdh6bh67h59dhdch556hbfh65bhc8h56dh6fhdfh67fh5c4h5cbhb4q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lx8gl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(c05c5cf3-bcb4-4307-a601-fbecde4f026b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Nov 30 07:04:04 crc kubenswrapper[4941]: E1130 07:04:04.539024 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="c05c5cf3-bcb4-4307-a601-fbecde4f026b"
Nov 30 07:04:04 crc kubenswrapper[4941]: E1130 07:04:04.965884 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached@sha256:0e00f2303db35259ffcd3d034f38ab9eb4cb089e268305a4165b5f86a18fce6c\\\"\"" pod="openstack/memcached-0" podUID="c05c5cf3-bcb4-4307-a601-fbecde4f026b"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.439210 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.439469 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j8spn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-766fdc659c-gzhrt_openstack(0edc204b-0866-4759-a9db-437c7c0cced6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.441846 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-766fdc659c-gzhrt" podUID="0edc204b-0866-4759-a9db-437c7c0cced6"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.447457 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.447629 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g7clc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-557f57d995-lbqcr_openstack(b1684d17-ee22-47ab-8561-f0c069d8b99a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.449486 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-557f57d995-lbqcr" podUID="b1684d17-ee22-47ab-8561-f0c069d8b99a"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.488800 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.488937 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xgscv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-8446fd7c75-fcvvs_openstack(d850f0b1-322a-43e6-a98a-fe18676ada8a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.490129 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs" podUID="d850f0b1-322a-43e6-a98a-fe18676ada8a"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.499196 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.499388 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lf97b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5cd665b7c7-nbnlx_openstack(75700cb8-83c7-405b-b8a2-dc133534ef90): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.501578 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx" podUID="75700cb8-83c7-405b-b8a2-dc133534ef90"
Nov 30 07:04:05 crc kubenswrapper[4941]: W1130 07:04:05.910506 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode01076ff_d267_4931_8788_47eee9ebfd76.slice/crio-138e8808dd91953fcac23eb199fdb811bb1b86dd2260525c2884d5339786f8ab WatchSource:0}: Error finding container 138e8808dd91953fcac23eb199fdb811bb1b86dd2260525c2884d5339786f8ab: Status 404 returned error can't find the container with id 138e8808dd91953fcac23eb199fdb811bb1b86dd2260525c2884d5339786f8ab
Nov 30 07:04:05 crc kubenswrapper[4941]: I1130 07:04:05.912124 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zcbz9"]
Nov 30 07:04:05 crc kubenswrapper[4941]: I1130 07:04:05.974738 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"79ca3b29-7cdd-4923-a12c-2f350d4b8728","Type":"ContainerStarted","Data":"6e22be391a8f9b4894810930342f6002715e23985ca3694f812967e9ee9cccf7"}
Nov 30 07:04:05 crc kubenswrapper[4941]: I1130 07:04:05.978009 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zcbz9" event={"ID":"e01076ff-d267-4931-8788-47eee9ebfd76","Type":"ContainerStarted","Data":"138e8808dd91953fcac23eb199fdb811bb1b86dd2260525c2884d5339786f8ab"}
Nov 30 07:04:05 crc kubenswrapper[4941]: I1130 07:04:05.980960 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"03f6ba41bf367eb3ad6a0ca9a42efb4ebf757994a2964d83030fd8e83c2c7d32"}
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.982745 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627\\\"\"" pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs" podUID="d850f0b1-322a-43e6-a98a-fe18676ada8a"
Nov 30 07:04:05 crc kubenswrapper[4941]: E1130 07:04:05.982958 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:4218330ae90f65f4a2c1d93334812c4d04a4ed1d46013269252aba16e1138627\\\"\"" pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx" podUID="75700cb8-83c7-405b-b8a2-dc133534ef90"
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.109829 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.150132 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-slfpx"]
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.233033 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Nov 30 07:04:06 crc kubenswrapper[4941]: W1130 07:04:06.392185 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea6e32f6_a5d9_4b23_9588_2ea6be572e72.slice/crio-033ce2886ced490e6be076782e2970fa4ea1eca22b8faae5840b93e9311e948b WatchSource:0}: Error finding container 033ce2886ced490e6be076782e2970fa4ea1eca22b8faae5840b93e9311e948b: Status 404 returned error can't find the container with id 033ce2886ced490e6be076782e2970fa4ea1eca22b8faae5840b93e9311e948b
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.773865 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-lbqcr"
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.784194 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.800361 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.948911 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7clc\" (UniqueName: \"kubernetes.io/projected/b1684d17-ee22-47ab-8561-f0c069d8b99a-kube-api-access-g7clc\") pod \"b1684d17-ee22-47ab-8561-f0c069d8b99a\" (UID: \"b1684d17-ee22-47ab-8561-f0c069d8b99a\") "
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.949086 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0edc204b-0866-4759-a9db-437c7c0cced6-config\") pod \"0edc204b-0866-4759-a9db-437c7c0cced6\" (UID: \"0edc204b-0866-4759-a9db-437c7c0cced6\") "
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.949117 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1684d17-ee22-47ab-8561-f0c069d8b99a-config\") pod \"b1684d17-ee22-47ab-8561-f0c069d8b99a\" (UID: \"b1684d17-ee22-47ab-8561-f0c069d8b99a\") "
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.949203 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0edc204b-0866-4759-a9db-437c7c0cced6-dns-svc\") pod \"0edc204b-0866-4759-a9db-437c7c0cced6\" (UID: \"0edc204b-0866-4759-a9db-437c7c0cced6\") "
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.949228 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8spn\" (UniqueName: \"kubernetes.io/projected/0edc204b-0866-4759-a9db-437c7c0cced6-kube-api-access-j8spn\") pod \"0edc204b-0866-4759-a9db-437c7c0cced6\" (UID: \"0edc204b-0866-4759-a9db-437c7c0cced6\") "
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.949906 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0edc204b-0866-4759-a9db-437c7c0cced6-config" (OuterVolumeSpecName: "config") pod "0edc204b-0866-4759-a9db-437c7c0cced6" (UID: "0edc204b-0866-4759-a9db-437c7c0cced6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.949876 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0edc204b-0866-4759-a9db-437c7c0cced6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0edc204b-0866-4759-a9db-437c7c0cced6" (UID: "0edc204b-0866-4759-a9db-437c7c0cced6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.950316 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1684d17-ee22-47ab-8561-f0c069d8b99a-config" (OuterVolumeSpecName: "config") pod "b1684d17-ee22-47ab-8561-f0c069d8b99a" (UID: "b1684d17-ee22-47ab-8561-f0c069d8b99a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.955151 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1684d17-ee22-47ab-8561-f0c069d8b99a-kube-api-access-g7clc" (OuterVolumeSpecName: "kube-api-access-g7clc") pod "b1684d17-ee22-47ab-8561-f0c069d8b99a" (UID: "b1684d17-ee22-47ab-8561-f0c069d8b99a"). InnerVolumeSpecName "kube-api-access-g7clc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.965521 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0edc204b-0866-4759-a9db-437c7c0cced6-kube-api-access-j8spn" (OuterVolumeSpecName: "kube-api-access-j8spn") pod "0edc204b-0866-4759-a9db-437c7c0cced6" (UID: "0edc204b-0866-4759-a9db-437c7c0cced6"). InnerVolumeSpecName "kube-api-access-j8spn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.988643 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ab69a64e-2e8a-4ee3-992a-50726935c90c","Type":"ContainerStarted","Data":"680401b99a5edecf9745ebc590dc84410517c18ef14238907216b9e91dc06b56"}
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.992658 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"45978317-0f07-44da-8b74-fbaaec0e6105","Type":"ContainerStarted","Data":"4167d8a3413d2475510af7736137df718f411d79744780a27c3acc5216161479"}
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.994986 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557f57d995-lbqcr" event={"ID":"b1684d17-ee22-47ab-8561-f0c069d8b99a","Type":"ContainerDied","Data":"01e7471704d4221f72b552f5356905cd2a423fac3cfaa4950b3f639896f24da4"}
Nov 30 07:04:06 crc kubenswrapper[4941]: I1130 07:04:06.995018 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557f57d995-lbqcr"
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.013164 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-766fdc659c-gzhrt"
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.013194 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-766fdc659c-gzhrt" event={"ID":"0edc204b-0866-4759-a9db-437c7c0cced6","Type":"ContainerDied","Data":"b6a68b2d63ee94c1b33c5aa3e31e7df82779c2ad96a98a1f0c4e4aee326884a6"}
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.029011 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ea6e32f6-a5d9-4b23-9588-2ea6be572e72","Type":"ContainerStarted","Data":"033ce2886ced490e6be076782e2970fa4ea1eca22b8faae5840b93e9311e948b"}
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.030206 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"14ce638b-1621-451a-80b6-0e13b6ffb734","Type":"ContainerStarted","Data":"227bbadacaa7e6571399e5d6495a5f4cbfcef7397463c1ea0e5d1d8f57402712"}
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.032752 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-slfpx" event={"ID":"af96ea99-9953-4e58-8ecc-0999730fcaf9","Type":"ContainerStarted","Data":"8a597546ac104c631a8abd11a04d992ca57a97a8f4c9e35b4d0ebd08223bd0a2"}
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.035540 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4e7a5ee5-1f0c-4819-a375-891a5e2cea03","Type":"ContainerStarted","Data":"0e7be72894db142820110f45d623c5d0708a4455ef259b4e3e8138e248399c22"}
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.056511 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0edc204b-0866-4759-a9db-437c7c0cced6-config\") on node \"crc\" DevicePath \"\""
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.056562 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1684d17-ee22-47ab-8561-f0c069d8b99a-config\") on node \"crc\" DevicePath \"\""
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.056577 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0edc204b-0866-4759-a9db-437c7c0cced6-dns-svc\") on node \"crc\" DevicePath \"\""
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.056590 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8spn\" (UniqueName: \"kubernetes.io/projected/0edc204b-0866-4759-a9db-437c7c0cced6-kube-api-access-j8spn\") on node \"crc\" DevicePath \"\""
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.056602 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7clc\" (UniqueName: \"kubernetes.io/projected/b1684d17-ee22-47ab-8561-f0c069d8b99a-kube-api-access-g7clc\") on node \"crc\" DevicePath \"\""
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.073060 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-lbqcr"]
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.075283 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-557f57d995-lbqcr"]
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.124456 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-gzhrt"]
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.150128 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-766fdc659c-gzhrt"]
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.534404 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0edc204b-0866-4759-a9db-437c7c0cced6" path="/var/lib/kubelet/pods/0edc204b-0866-4759-a9db-437c7c0cced6/volumes"
Nov 30 07:04:07 crc kubenswrapper[4941]: I1130 07:04:07.535117 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1684d17-ee22-47ab-8561-f0c069d8b99a" path="/var/lib/kubelet/pods/b1684d17-ee22-47ab-8561-f0c069d8b99a/volumes"
Nov 30 07:04:10 crc kubenswrapper[4941]: I1130 07:04:10.058820 4941 generic.go:334] "Generic (PLEG): container finished" podID="79ca3b29-7cdd-4923-a12c-2f350d4b8728" containerID="6e22be391a8f9b4894810930342f6002715e23985ca3694f812967e9ee9cccf7" exitCode=0
Nov 30 07:04:10 crc kubenswrapper[4941]: I1130 07:04:10.059061 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"79ca3b29-7cdd-4923-a12c-2f350d4b8728","Type":"ContainerDied","Data":"6e22be391a8f9b4894810930342f6002715e23985ca3694f812967e9ee9cccf7"}
Nov 30 07:04:11 crc kubenswrapper[4941]: I1130 07:04:11.075627 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zcbz9" event={"ID":"e01076ff-d267-4931-8788-47eee9ebfd76","Type":"ContainerStarted","Data":"fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1"}
Nov 30 07:04:11 crc kubenswrapper[4941]: I1130 07:04:11.077327 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-zcbz9"
Nov 30 07:04:11 crc kubenswrapper[4941]: I1130 07:04:11.078647 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ea6e32f6-a5d9-4b23-9588-2ea6be572e72","Type":"ContainerStarted","Data":"eba3ae86f525c89da8b2e9bcc6b9bd66ffd75c1417b2bfa7df563071df5d0c63"}
Nov 30 07:04:11 crc kubenswrapper[4941]: I1130 07:04:11.082365 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"14ce638b-1621-451a-80b6-0e13b6ffb734","Type":"ContainerStarted","Data":"07f994f9e1c2fa25bd24a1b436f837eb006877e11013a589a34bf028d013c942"}
Nov 30 07:04:11 crc kubenswrapper[4941]: I1130 07:04:11.084442 4941 generic.go:334] "Generic (PLEG): container finished" podID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerID="1523a6a7a83aaaf7a71979e336c4ecae83bc030afc2723bba30b9e6b531966fa" exitCode=0
Nov 30 07:04:11 crc kubenswrapper[4941]: I1130 07:04:11.084502 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-slfpx" event={"ID":"af96ea99-9953-4e58-8ecc-0999730fcaf9","Type":"ContainerDied","Data":"1523a6a7a83aaaf7a71979e336c4ecae83bc030afc2723bba30b9e6b531966fa"}
Nov 30 07:04:11 crc kubenswrapper[4941]: I1130 07:04:11.088864 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"79ca3b29-7cdd-4923-a12c-2f350d4b8728","Type":"ContainerStarted","Data":"b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da"}
Nov 30 07:04:11 crc kubenswrapper[4941]: I1130 07:04:11.090546 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ab69a64e-2e8a-4ee3-992a-50726935c90c","Type":"ContainerStarted","Data":"2058ab41ac78e9c36944b4e0398f24a4d71f927eba605bf782b4bf814c0342e4"}
Nov 30 07:04:11 crc kubenswrapper[4941]: I1130 07:04:11.091483 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Nov 30 07:04:11 crc kubenswrapper[4941]: I1130 07:04:11.105383 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-zcbz9" podStartSLOduration=25.768691043 podStartE2EDuration="30.105315258s" podCreationTimestamp="2025-11-30 07:03:41 +0000 UTC" firstStartedPulling="2025-11-30 07:04:05.912655557 +0000 UTC m=+1066.680827166" lastFinishedPulling="2025-11-30 07:04:10.249279762 +0000 UTC m=+1071.017451381" observedRunningTime="2025-11-30 07:04:11.099012412 +0000 UTC m=+1071.867184021" watchObservedRunningTime="2025-11-30 07:04:11.105315258 +0000 UTC m=+1071.873486867"
Nov 30 07:04:11 crc kubenswrapper[4941]: I1130 07:04:11.143919 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=28.928275297 podStartE2EDuration="33.143901198s" podCreationTimestamp="2025-11-30 07:03:38 +0000 UTC" firstStartedPulling="2025-11-30 07:04:06.127463678 +0000 UTC m=+1066.895635287" lastFinishedPulling="2025-11-30 07:04:10.343089579 +0000 UTC m=+1071.111261188" observedRunningTime="2025-11-30 07:04:11.119432167 +0000 UTC m=+1071.887603776" watchObservedRunningTime="2025-11-30 07:04:11.143901198 +0000 UTC m=+1071.912072817"
Nov 30 07:04:11 crc kubenswrapper[4941]: I1130 07:04:11.155632 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.325280178 podStartE2EDuration="38.155607412s" podCreationTimestamp="2025-11-30 07:03:33 +0000 UTC" firstStartedPulling="2025-11-30 07:03:35.565218328 +0000 UTC m=+1036.333389937" lastFinishedPulling="2025-11-30 07:04:05.395545562 +0000 UTC m=+1066.163717171" observedRunningTime="2025-11-30 07:04:11.141472432 +0000 UTC m=+1071.909644041" watchObservedRunningTime="2025-11-30 07:04:11.155607412 +0000 UTC m=+1071.923779041"
Nov 30 07:04:12 crc kubenswrapper[4941]: I1130 07:04:12.119512 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-slfpx" event={"ID":"af96ea99-9953-4e58-8ecc-0999730fcaf9","Type":"ContainerStarted","Data":"ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f"}
Nov 30 07:04:12 crc kubenswrapper[4941]: I1130 07:04:12.119776 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-slfpx" event={"ID":"af96ea99-9953-4e58-8ecc-0999730fcaf9","Type":"ContainerStarted","Data":"de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9"}
Nov 30 07:04:12 crc kubenswrapper[4941]: I1130 07:04:12.120148 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-slfpx"
Nov 30 07:04:12 crc kubenswrapper[4941]: I1130 07:04:12.120171 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-slfpx"
Nov 30 07:04:12 crc kubenswrapper[4941]: I1130 07:04:12.148842 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-slfpx" podStartSLOduration=27.432606728 podStartE2EDuration="31.148825275s" podCreationTimestamp="2025-11-30 07:03:41 +0000 UTC" firstStartedPulling="2025-11-30 07:04:06.147153091 +0000 UTC m=+1066.915324700" lastFinishedPulling="2025-11-30 07:04:09.863371638 +0000 UTC m=+1070.631543247" observedRunningTime="2025-11-30 07:04:12.142743575 +0000 UTC m=+1072.910915174" watchObservedRunningTime="2025-11-30 07:04:12.148825275 +0000 UTC m=+1072.916996884"
Nov 30 07:04:14 crc kubenswrapper[4941]: I1130 07:04:14.138522 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ea6e32f6-a5d9-4b23-9588-2ea6be572e72","Type":"ContainerStarted","Data":"e2122d103d04cc614de8738002d8a386052f20ec6347c472adc19bc507389c00"}
Nov 30 07:04:14 crc kubenswrapper[4941]: I1130 07:04:14.142165 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"14ce638b-1621-451a-80b6-0e13b6ffb734","Type":"ContainerStarted","Data":"d012ac9e18256da2434ecdcf48894a12178c369b643c9b30f25771ee1c4fba92"}
Nov 30 07:04:14 crc kubenswrapper[4941]: I1130 07:04:14.165271 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=22.803362682 podStartE2EDuration="30.165254353s" podCreationTimestamp="2025-11-30 07:03:44 +0000 UTC" firstStartedPulling="2025-11-30 07:04:06.395465594 +0000 UTC m=+1067.163637203" lastFinishedPulling="2025-11-30 07:04:13.757357245 +0000 UTC m=+1074.525528874" observedRunningTime="2025-11-30 07:04:14.161463015 +0000 UTC m=+1074.929634624" watchObservedRunningTime="2025-11-30 07:04:14.165254353 +0000 UTC m=+1074.933425962"
Nov 30 07:04:14 crc kubenswrapper[4941]: I1130 07:04:14.194988 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=24.317702079 podStartE2EDuration="31.194962017s" podCreationTimestamp="2025-11-30 07:03:43 +0000 UTC" firstStartedPulling="2025-11-30 07:04:06.871639915 +0000 UTC m=+1067.639811524" lastFinishedPulling="2025-11-30 07:04:13.748899853 +0000 UTC m=+1074.517071462" observedRunningTime="2025-11-30 07:04:14.18668904 +0000 UTC m=+1074.954860649" watchObservedRunningTime="2025-11-30 07:04:14.194962017 +0000 UTC m=+1074.963133626"
Nov 30 07:04:14 crc kubenswrapper[4941]: E1130 07:04:14.542521 4941 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.107:43388->38.102.83.107:45643: write tcp 38.102.83.107:43388->38.102.83.107:45643: write: broken pipe
Nov 30 07:04:14 crc kubenswrapper[4941]: I1130 07:04:14.899286 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Nov 30 07:04:14 crc kubenswrapper[4941]: I1130 07:04:14.899368 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Nov 30 07:04:14 crc kubenswrapper[4941]: I1130 07:04:14.931710 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Nov 30 07:04:14 crc kubenswrapper[4941]: I1130 07:04:14.931764 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Nov 30 07:04:14 crc kubenswrapper[4941]: I1130 07:04:14.974103 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.117362 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.187949 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.248277 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.453039 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cd665b7c7-nbnlx"]
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.511586 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b7ccdcb4f-576sr"]
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.512885 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.519597 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.552353 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-rwxvw"]
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.554635 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.559169 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.571724 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b7ccdcb4f-576sr"]
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.581071 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-rwxvw"]
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.623469 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-config\") pod \"dnsmasq-dns-b7ccdcb4f-576sr\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.623515 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/94cbff61-3614-4efd-b4ba-36bef65f2ae7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.623563 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-ovsdbserver-sb\") pod \"dnsmasq-dns-b7ccdcb4f-576sr\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.623583 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/94cbff61-3614-4efd-b4ba-36bef65f2ae7-ovs-rundir\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.623610 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94cbff61-3614-4efd-b4ba-36bef65f2ae7-combined-ca-bundle\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.623641 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vt5k5\" (UniqueName: \"kubernetes.io/projected/94cbff61-3614-4efd-b4ba-36bef65f2ae7-kube-api-access-vt5k5\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.623658 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-dns-svc\") pod \"dnsmasq-dns-b7ccdcb4f-576sr\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.623703 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/94cbff61-3614-4efd-b4ba-36bef65f2ae7-ovn-rundir\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.623721 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94cbff61-3614-4efd-b4ba-36bef65f2ae7-config\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.623747 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf9gd\" (UniqueName: \"kubernetes.io/projected/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-kube-api-access-wf9gd\") pod \"dnsmasq-dns-b7ccdcb4f-576sr\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.725707 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-config\") pod \"dnsmasq-dns-b7ccdcb4f-576sr\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.726082 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/94cbff61-3614-4efd-b4ba-36bef65f2ae7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.726135 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-ovsdbserver-sb\") pod \"dnsmasq-dns-b7ccdcb4f-576sr\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.726157 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/94cbff61-3614-4efd-b4ba-36bef65f2ae7-ovs-rundir\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.726186 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94cbff61-3614-4efd-b4ba-36bef65f2ae7-combined-ca-bundle\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.726238 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt5k5\" (UniqueName: \"kubernetes.io/projected/94cbff61-3614-4efd-b4ba-36bef65f2ae7-kube-api-access-vt5k5\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.726265 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-dns-svc\") pod \"dnsmasq-dns-b7ccdcb4f-576sr\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.726349 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94cbff61-3614-4efd-b4ba-36bef65f2ae7-config\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.726367 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/94cbff61-3614-4efd-b4ba-36bef65f2ae7-ovn-rundir\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.726396 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf9gd\" (UniqueName: \"kubernetes.io/projected/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-kube-api-access-wf9gd\") pod \"dnsmasq-dns-b7ccdcb4f-576sr\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.727944 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-config\") pod \"dnsmasq-dns-b7ccdcb4f-576sr\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.728785 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/94cbff61-3614-4efd-b4ba-36bef65f2ae7-ovs-rundir\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.729274 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94cbff61-3614-4efd-b4ba-36bef65f2ae7-config\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.729820 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-dns-svc\") pod \"dnsmasq-dns-b7ccdcb4f-576sr\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.729868 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/94cbff61-3614-4efd-b4ba-36bef65f2ae7-ovn-rundir\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.735824 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-ovsdbserver-sb\") pod \"dnsmasq-dns-b7ccdcb4f-576sr\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.753094 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/94cbff61-3614-4efd-b4ba-36bef65f2ae7-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.765122 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94cbff61-3614-4efd-b4ba-36bef65f2ae7-combined-ca-bundle\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.773150 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf9gd\" (UniqueName: \"kubernetes.io/projected/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-kube-api-access-wf9gd\") pod \"dnsmasq-dns-b7ccdcb4f-576sr\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.814950 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vt5k5\" (UniqueName: \"kubernetes.io/projected/94cbff61-3614-4efd-b4ba-36bef65f2ae7-kube-api-access-vt5k5\") pod \"ovn-controller-metrics-rwxvw\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.841785 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.859935 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-fcvvs"]
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.892078 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-rwxvw"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.912574 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bd7c66845-d7tlx"]
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.924366 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.931026 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.934350 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-ovsdbserver-sb\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.934443 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-dns-svc\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.934491 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgcr9\" (UniqueName: \"kubernetes.io/projected/79cf65c4-a135-447d-b8ca-6c219b698395-kube-api-access-cgcr9\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.934513 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-ovsdbserver-nb\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.934539 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-config\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx"
Nov 30 07:04:15 crc kubenswrapper[4941]: I1130 07:04:15.954662 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bd7c66845-d7tlx"]
Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.035966 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-ovsdbserver-sb\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx"
Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.036051 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-dns-svc\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx"
Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.036106 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgcr9\" (UniqueName: \"kubernetes.io/projected/79cf65c4-a135-447d-b8ca-6c219b698395-kube-api-access-cgcr9\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") "
pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.036128 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-ovsdbserver-nb\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.036155 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-config\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.037095 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-config\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.040904 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-ovsdbserver-sb\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.041484 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-dns-svc\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.042244 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-ovsdbserver-nb\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.059244 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.076892 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgcr9\" (UniqueName: \"kubernetes.io/projected/79cf65c4-a135-447d-b8ca-6c219b698395-kube-api-access-cgcr9\") pod \"dnsmasq-dns-5bd7c66845-d7tlx\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.100029 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.100130 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.137461 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75700cb8-83c7-405b-b8a2-dc133534ef90-dns-svc\") pod \"75700cb8-83c7-405b-b8a2-dc133534ef90\" (UID: \"75700cb8-83c7-405b-b8a2-dc133534ef90\") " Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.137811 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lf97b\" (UniqueName: \"kubernetes.io/projected/75700cb8-83c7-405b-b8a2-dc133534ef90-kube-api-access-lf97b\") pod \"75700cb8-83c7-405b-b8a2-dc133534ef90\" (UID: \"75700cb8-83c7-405b-b8a2-dc133534ef90\") " Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.137860 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75700cb8-83c7-405b-b8a2-dc133534ef90-config\") pod \"75700cb8-83c7-405b-b8a2-dc133534ef90\" (UID: \"75700cb8-83c7-405b-b8a2-dc133534ef90\") " Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.137997 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75700cb8-83c7-405b-b8a2-dc133534ef90-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "75700cb8-83c7-405b-b8a2-dc133534ef90" (UID: "75700cb8-83c7-405b-b8a2-dc133534ef90"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.138435 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75700cb8-83c7-405b-b8a2-dc133534ef90-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.138434 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75700cb8-83c7-405b-b8a2-dc133534ef90-config" (OuterVolumeSpecName: "config") pod "75700cb8-83c7-405b-b8a2-dc133534ef90" (UID: "75700cb8-83c7-405b-b8a2-dc133534ef90"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.144794 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75700cb8-83c7-405b-b8a2-dc133534ef90-kube-api-access-lf97b" (OuterVolumeSpecName: "kube-api-access-lf97b") pod "75700cb8-83c7-405b-b8a2-dc133534ef90" (UID: "75700cb8-83c7-405b-b8a2-dc133534ef90"). InnerVolumeSpecName "kube-api-access-lf97b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.162518 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx" event={"ID":"75700cb8-83c7-405b-b8a2-dc133534ef90","Type":"ContainerDied","Data":"051559ab70fe5c3e88d6e1ec62671d4088bf55f3dfc5b592f0e63c679967de85"} Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.162744 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cd665b7c7-nbnlx" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.193927 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.231024 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cd665b7c7-nbnlx"] Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.239887 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lf97b\" (UniqueName: \"kubernetes.io/projected/75700cb8-83c7-405b-b8a2-dc133534ef90-kube-api-access-lf97b\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.239918 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75700cb8-83c7-405b-b8a2-dc133534ef90-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.241712 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5cd665b7c7-nbnlx"] Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.256215 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.266100 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.335401 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b7ccdcb4f-576sr"] Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.443503 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgscv\" (UniqueName: \"kubernetes.io/projected/d850f0b1-322a-43e6-a98a-fe18676ada8a-kube-api-access-xgscv\") pod \"d850f0b1-322a-43e6-a98a-fe18676ada8a\" (UID: \"d850f0b1-322a-43e6-a98a-fe18676ada8a\") " Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.444042 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d850f0b1-322a-43e6-a98a-fe18676ada8a-dns-svc\") pod \"d850f0b1-322a-43e6-a98a-fe18676ada8a\" (UID: \"d850f0b1-322a-43e6-a98a-fe18676ada8a\") " Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.444201 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d850f0b1-322a-43e6-a98a-fe18676ada8a-config\") pod \"d850f0b1-322a-43e6-a98a-fe18676ada8a\" (UID: \"d850f0b1-322a-43e6-a98a-fe18676ada8a\") " Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.444997 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d850f0b1-322a-43e6-a98a-fe18676ada8a-config" (OuterVolumeSpecName: "config") pod "d850f0b1-322a-43e6-a98a-fe18676ada8a" (UID: "d850f0b1-322a-43e6-a98a-fe18676ada8a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.446351 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d850f0b1-322a-43e6-a98a-fe18676ada8a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d850f0b1-322a-43e6-a98a-fe18676ada8a" (UID: "d850f0b1-322a-43e6-a98a-fe18676ada8a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.448752 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d850f0b1-322a-43e6-a98a-fe18676ada8a-kube-api-access-xgscv" (OuterVolumeSpecName: "kube-api-access-xgscv") pod "d850f0b1-322a-43e6-a98a-fe18676ada8a" (UID: "d850f0b1-322a-43e6-a98a-fe18676ada8a"). InnerVolumeSpecName "kube-api-access-xgscv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.546258 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d850f0b1-322a-43e6-a98a-fe18676ada8a-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.546295 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d850f0b1-322a-43e6-a98a-fe18676ada8a-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.546305 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgscv\" (UniqueName: \"kubernetes.io/projected/d850f0b1-322a-43e6-a98a-fe18676ada8a-kube-api-access-xgscv\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.555141 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-rwxvw"] Nov 30 07:04:16 crc kubenswrapper[4941]: W1130 07:04:16.566978 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94cbff61_3614_4efd_b4ba_36bef65f2ae7.slice/crio-36b116bad0212daa30d59ae82586ec7c2c84ebc36044a264d3a5765707f99ccb WatchSource:0}: Error finding container 36b116bad0212daa30d59ae82586ec7c2c84ebc36044a264d3a5765707f99ccb: Status 404 returned error can't find the container with id 36b116bad0212daa30d59ae82586ec7c2c84ebc36044a264d3a5765707f99ccb Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.682371 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bd7c66845-d7tlx"] Nov 30 07:04:16 crc kubenswrapper[4941]: W1130 07:04:16.689145 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79cf65c4_a135_447d_b8ca_6c219b698395.slice/crio-e907d3da9e67bf2bf6db3e3d0dd5f10ea797cd49229adea15a3ebfddaaab1574 WatchSource:0}: Error finding container e907d3da9e67bf2bf6db3e3d0dd5f10ea797cd49229adea15a3ebfddaaab1574: Status 404 returned error can't find the container with id e907d3da9e67bf2bf6db3e3d0dd5f10ea797cd49229adea15a3ebfddaaab1574 Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.819808 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-f9c5-account-create-update-r4chg"] Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.821495 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-f9c5-account-create-update-r4chg" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.825620 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.831340 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f9c5-account-create-update-r4chg"] Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.898300 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-nn258"] Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.900602 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nn258" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.908203 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-nn258"] Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.953973 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4kfz\" (UniqueName: \"kubernetes.io/projected/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3-kube-api-access-s4kfz\") pod \"keystone-f9c5-account-create-update-r4chg\" (UID: \"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3\") " pod="openstack/keystone-f9c5-account-create-update-r4chg" Nov 30 07:04:16 crc kubenswrapper[4941]: I1130 07:04:16.954056 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3-operator-scripts\") pod \"keystone-f9c5-account-create-update-r4chg\" (UID: \"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3\") " pod="openstack/keystone-f9c5-account-create-update-r4chg" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.055609 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrgvc\" (UniqueName: \"kubernetes.io/projected/831541f1-b1bf-4625-b4db-8a2b57d0481e-kube-api-access-mrgvc\") pod \"keystone-db-create-nn258\" (UID: \"831541f1-b1bf-4625-b4db-8a2b57d0481e\") " pod="openstack/keystone-db-create-nn258" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.055707 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/831541f1-b1bf-4625-b4db-8a2b57d0481e-operator-scripts\") pod \"keystone-db-create-nn258\" (UID: \"831541f1-b1bf-4625-b4db-8a2b57d0481e\") " pod="openstack/keystone-db-create-nn258" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.056242 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4kfz\" (UniqueName: \"kubernetes.io/projected/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3-kube-api-access-s4kfz\") pod \"keystone-f9c5-account-create-update-r4chg\" (UID: \"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3\") " pod="openstack/keystone-f9c5-account-create-update-r4chg" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.056460 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3-operator-scripts\") pod \"keystone-f9c5-account-create-update-r4chg\" (UID: \"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3\") " pod="openstack/keystone-f9c5-account-create-update-r4chg" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.058318 4941 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3-operator-scripts\") pod \"keystone-f9c5-account-create-update-r4chg\" (UID: \"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3\") " pod="openstack/keystone-f9c5-account-create-update-r4chg" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.079774 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4kfz\" (UniqueName: \"kubernetes.io/projected/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3-kube-api-access-s4kfz\") pod \"keystone-f9c5-account-create-update-r4chg\" (UID: \"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3\") " pod="openstack/keystone-f9c5-account-create-update-r4chg" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.140780 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-pk4mk"] Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.142415 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pk4mk" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.149996 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-pk4mk"] Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.159214 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrgvc\" (UniqueName: \"kubernetes.io/projected/831541f1-b1bf-4625-b4db-8a2b57d0481e-kube-api-access-mrgvc\") pod \"keystone-db-create-nn258\" (UID: \"831541f1-b1bf-4625-b4db-8a2b57d0481e\") " pod="openstack/keystone-db-create-nn258" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.159350 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/831541f1-b1bf-4625-b4db-8a2b57d0481e-operator-scripts\") pod \"keystone-db-create-nn258\" (UID: \"831541f1-b1bf-4625-b4db-8a2b57d0481e\") " pod="openstack/keystone-db-create-nn258" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.160531 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-f9c5-account-create-update-r4chg" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.161117 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/831541f1-b1bf-4625-b4db-8a2b57d0481e-operator-scripts\") pod \"keystone-db-create-nn258\" (UID: \"831541f1-b1bf-4625-b4db-8a2b57d0481e\") " pod="openstack/keystone-db-create-nn258" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.180267 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrgvc\" (UniqueName: \"kubernetes.io/projected/831541f1-b1bf-4625-b4db-8a2b57d0481e-kube-api-access-mrgvc\") pod \"keystone-db-create-nn258\" (UID: \"831541f1-b1bf-4625-b4db-8a2b57d0481e\") " pod="openstack/keystone-db-create-nn258" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.190975 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr" event={"ID":"a4f2b928-a1f9-4a85-a094-ea6c19e7c110","Type":"ContainerStarted","Data":"e705a04d71c5f9919e0e61a54c449cf5306f422e65c4b7c9f7ef36070d791c11"} Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.192348 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" event={"ID":"79cf65c4-a135-447d-b8ca-6c219b698395","Type":"ContainerStarted","Data":"e907d3da9e67bf2bf6db3e3d0dd5f10ea797cd49229adea15a3ebfddaaab1574"} Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.193717 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-rwxvw" event={"ID":"94cbff61-3614-4efd-b4ba-36bef65f2ae7","Type":"ContainerStarted","Data":"c0d359c73119f6c2eb67ed8df045fe124ba61df2f455244a61a488956e3a2e34"} Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.193747 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-rwxvw" event={"ID":"94cbff61-3614-4efd-b4ba-36bef65f2ae7","Type":"ContainerStarted","Data":"36b116bad0212daa30d59ae82586ec7c2c84ebc36044a264d3a5765707f99ccb"} Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.195940 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fa90de41-9166-475c-925a-3d79b02a694d","Type":"ContainerStarted","Data":"ef1eef5d4d1e7312fddf801ca1f92d70d30764509fec5b54de267fa957a12e8b"} Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.197903 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.198024 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8446fd7c75-fcvvs" event={"ID":"d850f0b1-322a-43e6-a98a-fe18676ada8a","Type":"ContainerDied","Data":"c1633595662d73ceeccdc2145e380a1d6efc3a2ba4b4f4e37dbc249f9b08c51d"} Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.221407 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-rwxvw" podStartSLOduration=2.2213799 podStartE2EDuration="2.2213799s" podCreationTimestamp="2025-11-30 07:04:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:17.218315364 +0000 UTC m=+1077.986486973" watchObservedRunningTime="2025-11-30 07:04:17.2213799 +0000 UTC m=+1077.989551509" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.234773 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nn258" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.261963 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slj6l\" (UniqueName: \"kubernetes.io/projected/a31804e8-764b-460f-bd79-e64ef5b7d06a-kube-api-access-slj6l\") pod \"placement-db-create-pk4mk\" (UID: \"a31804e8-764b-460f-bd79-e64ef5b7d06a\") " pod="openstack/placement-db-create-pk4mk" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.262094 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a31804e8-764b-460f-bd79-e64ef5b7d06a-operator-scripts\") pod \"placement-db-create-pk4mk\" (UID: \"a31804e8-764b-460f-bd79-e64ef5b7d06a\") " pod="openstack/placement-db-create-pk4mk" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.265819 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6f8e-account-create-update-n2v6n"] Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.267250 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6f8e-account-create-update-n2v6n" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.271042 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.284316 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6f8e-account-create-update-n2v6n"] Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.317477 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.382988 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a31804e8-764b-460f-bd79-e64ef5b7d06a-operator-scripts\") pod \"placement-db-create-pk4mk\" (UID: \"a31804e8-764b-460f-bd79-e64ef5b7d06a\") " pod="openstack/placement-db-create-pk4mk" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.384381 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk57v\" (UniqueName: \"kubernetes.io/projected/c08bb8f0-daa5-4321-8e42-bf2713833cbf-kube-api-access-jk57v\") pod \"placement-6f8e-account-create-update-n2v6n\" (UID: \"c08bb8f0-daa5-4321-8e42-bf2713833cbf\") " pod="openstack/placement-6f8e-account-create-update-n2v6n" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.385290 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a31804e8-764b-460f-bd79-e64ef5b7d06a-operator-scripts\") pod \"placement-db-create-pk4mk\" (UID: \"a31804e8-764b-460f-bd79-e64ef5b7d06a\") " pod="openstack/placement-db-create-pk4mk" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.386896 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slj6l\" (UniqueName: \"kubernetes.io/projected/a31804e8-764b-460f-bd79-e64ef5b7d06a-kube-api-access-slj6l\") pod \"placement-db-create-pk4mk\" (UID: \"a31804e8-764b-460f-bd79-e64ef5b7d06a\") " pod="openstack/placement-db-create-pk4mk" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.387309 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c08bb8f0-daa5-4321-8e42-bf2713833cbf-operator-scripts\") pod \"placement-6f8e-account-create-update-n2v6n\" (UID: \"c08bb8f0-daa5-4321-8e42-bf2713833cbf\") " pod="openstack/placement-6f8e-account-create-update-n2v6n" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.450892 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slj6l\" (UniqueName: \"kubernetes.io/projected/a31804e8-764b-460f-bd79-e64ef5b7d06a-kube-api-access-slj6l\") pod \"placement-db-create-pk4mk\" (UID: \"a31804e8-764b-460f-bd79-e64ef5b7d06a\") " pod="openstack/placement-db-create-pk4mk" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.478957 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-fcvvs"] Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.491930 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c08bb8f0-daa5-4321-8e42-bf2713833cbf-operator-scripts\") pod \"placement-6f8e-account-create-update-n2v6n\" (UID: 
\"c08bb8f0-daa5-4321-8e42-bf2713833cbf\") " pod="openstack/placement-6f8e-account-create-update-n2v6n" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.494486 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk57v\" (UniqueName: \"kubernetes.io/projected/c08bb8f0-daa5-4321-8e42-bf2713833cbf-kube-api-access-jk57v\") pod \"placement-6f8e-account-create-update-n2v6n\" (UID: \"c08bb8f0-daa5-4321-8e42-bf2713833cbf\") " pod="openstack/placement-6f8e-account-create-update-n2v6n" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.496468 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c08bb8f0-daa5-4321-8e42-bf2713833cbf-operator-scripts\") pod \"placement-6f8e-account-create-update-n2v6n\" (UID: \"c08bb8f0-daa5-4321-8e42-bf2713833cbf\") " pod="openstack/placement-6f8e-account-create-update-n2v6n" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.496492 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8446fd7c75-fcvvs"] Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.522116 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk57v\" (UniqueName: \"kubernetes.io/projected/c08bb8f0-daa5-4321-8e42-bf2713833cbf-kube-api-access-jk57v\") pod \"placement-6f8e-account-create-update-n2v6n\" (UID: \"c08bb8f0-daa5-4321-8e42-bf2713833cbf\") " pod="openstack/placement-6f8e-account-create-update-n2v6n" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.534576 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75700cb8-83c7-405b-b8a2-dc133534ef90" path="/var/lib/kubelet/pods/75700cb8-83c7-405b-b8a2-dc133534ef90/volumes" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.535015 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d850f0b1-322a-43e6-a98a-fe18676ada8a" path="/var/lib/kubelet/pods/d850f0b1-322a-43e6-a98a-fe18676ada8a/volumes" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.619533 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pk4mk" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.662189 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6f8e-account-create-update-n2v6n" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.687319 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.689750 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.692655 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.693125 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.693427 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.693654 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-v6dzf" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.705574 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.798856 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/322fb449-5599-45af-97e2-158692366d9b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.798922 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv8z8\" (UniqueName: \"kubernetes.io/projected/322fb449-5599-45af-97e2-158692366d9b-kube-api-access-bv8z8\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.798971 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.799005 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.799179 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/322fb449-5599-45af-97e2-158692366d9b-scripts\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.799293 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322fb449-5599-45af-97e2-158692366d9b-config\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.799653 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: 
I1130 07:04:17.845556 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f9c5-account-create-update-r4chg"] Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.863714 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-nn258"] Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.901823 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/322fb449-5599-45af-97e2-158692366d9b-scripts\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.902240 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322fb449-5599-45af-97e2-158692366d9b-config\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.902348 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.902396 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/322fb449-5599-45af-97e2-158692366d9b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.902424 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv8z8\" (UniqueName: \"kubernetes.io/projected/322fb449-5599-45af-97e2-158692366d9b-kube-api-access-bv8z8\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.902454 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.902478 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.908118 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/322fb449-5599-45af-97e2-158692366d9b-scripts\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.908384 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322fb449-5599-45af-97e2-158692366d9b-config\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.910988 4941 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/322fb449-5599-45af-97e2-158692366d9b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.911294 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.916252 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.928202 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:17 crc kubenswrapper[4941]: I1130 07:04:17.928526 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv8z8\" (UniqueName: \"kubernetes.io/projected/322fb449-5599-45af-97e2-158692366d9b-kube-api-access-bv8z8\") pod \"ovn-northd-0\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " pod="openstack/ovn-northd-0" Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.022808 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.080635 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6f8e-account-create-update-n2v6n"] Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.130285 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-pk4mk"] Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.233241 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pk4mk" event={"ID":"a31804e8-764b-460f-bd79-e64ef5b7d06a","Type":"ContainerStarted","Data":"76ac10066bd3ad5bed03042e4163a40a345b0a536de19a7c73e9ad430c65a3f5"} Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.235719 4941 generic.go:334] "Generic (PLEG): container finished" podID="a4f2b928-a1f9-4a85-a094-ea6c19e7c110" containerID="4f6c4875c73302c4d1b7a31174ebf1b3d340d24c9d887fb25eb54b096ebb75ea" exitCode=0 Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.235801 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr" event={"ID":"a4f2b928-a1f9-4a85-a094-ea6c19e7c110","Type":"ContainerDied","Data":"4f6c4875c73302c4d1b7a31174ebf1b3d340d24c9d887fb25eb54b096ebb75ea"} Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.245569 4941 generic.go:334] "Generic (PLEG): container finished" podID="79cf65c4-a135-447d-b8ca-6c219b698395" containerID="1db871b8a00f83aec555cbdd8a63670b3bf4a51f652a0f30641ce6a0ae860880" exitCode=0 Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.245735 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" event={"ID":"79cf65c4-a135-447d-b8ca-6c219b698395","Type":"ContainerDied","Data":"1db871b8a00f83aec555cbdd8a63670b3bf4a51f652a0f30641ce6a0ae860880"} Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.255294 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6f8e-account-create-update-n2v6n" event={"ID":"c08bb8f0-daa5-4321-8e42-bf2713833cbf","Type":"ContainerStarted","Data":"4627a0553a5f0e66bbcc124a38aed5e7a41719832381e4d92b5c457b6c0e82e1"} Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.264486 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f9c5-account-create-update-r4chg" event={"ID":"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3","Type":"ContainerStarted","Data":"0699e9d89734106167f49496bec6872e386a90ae7040eadeef3df97ab4bf7530"} Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.264547 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f9c5-account-create-update-r4chg" event={"ID":"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3","Type":"ContainerStarted","Data":"d8a0eaa8bd923ea1b73f1c1518326faa59a45219588418a2cec71f390c083273"} Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.268826 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nn258" event={"ID":"831541f1-b1bf-4625-b4db-8a2b57d0481e","Type":"ContainerStarted","Data":"e84f6dd36a19f12cd165d9c7cbfb94fb5ea22319817c0302e9b4c7b1ddb13109"} Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.269163 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nn258" event={"ID":"831541f1-b1bf-4625-b4db-8a2b57d0481e","Type":"ContainerStarted","Data":"fdd5e03779e9571b75847d7be5135f2f13dff5f707d7a9ed371a20f0f27e1022"} Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.307556 4941 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/keystone-db-create-nn258" podStartSLOduration=2.307531863 podStartE2EDuration="2.307531863s" podCreationTimestamp="2025-11-30 07:04:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:18.305764608 +0000 UTC m=+1079.073936217" watchObservedRunningTime="2025-11-30 07:04:18.307531863 +0000 UTC m=+1079.075703472" Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.355526 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-f9c5-account-create-update-r4chg" podStartSLOduration=2.355497645 podStartE2EDuration="2.355497645s" podCreationTimestamp="2025-11-30 07:04:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:18.329027322 +0000 UTC m=+1079.097198951" watchObservedRunningTime="2025-11-30 07:04:18.355497645 +0000 UTC m=+1079.123669254" Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.601295 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 30 07:04:18 crc kubenswrapper[4941]: I1130 07:04:18.982821 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.301938 4941 generic.go:334] "Generic (PLEG): container finished" podID="6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3" containerID="0699e9d89734106167f49496bec6872e386a90ae7040eadeef3df97ab4bf7530" exitCode=0 Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.302115 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f9c5-account-create-update-r4chg" event={"ID":"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3","Type":"ContainerDied","Data":"0699e9d89734106167f49496bec6872e386a90ae7040eadeef3df97ab4bf7530"} Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.305347 4941 generic.go:334] "Generic (PLEG): container finished" podID="831541f1-b1bf-4625-b4db-8a2b57d0481e" containerID="e84f6dd36a19f12cd165d9c7cbfb94fb5ea22319817c0302e9b4c7b1ddb13109" exitCode=0 Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.305440 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nn258" event={"ID":"831541f1-b1bf-4625-b4db-8a2b57d0481e","Type":"ContainerDied","Data":"e84f6dd36a19f12cd165d9c7cbfb94fb5ea22319817c0302e9b4c7b1ddb13109"} Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.308310 4941 generic.go:334] "Generic (PLEG): container finished" podID="a31804e8-764b-460f-bd79-e64ef5b7d06a" containerID="6620a3aa486742d5043cee90f84c857185f75eb6aa26481d3b5919db990ebcf2" exitCode=0 Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.308421 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pk4mk" event={"ID":"a31804e8-764b-460f-bd79-e64ef5b7d06a","Type":"ContainerDied","Data":"6620a3aa486742d5043cee90f84c857185f75eb6aa26481d3b5919db990ebcf2"} Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.310041 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr" event={"ID":"a4f2b928-a1f9-4a85-a094-ea6c19e7c110","Type":"ContainerStarted","Data":"6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c"} Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.311010 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr" Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.324430 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" event={"ID":"79cf65c4-a135-447d-b8ca-6c219b698395","Type":"ContainerStarted","Data":"f0e9148b7ea9aceb67086d995581b0c78637918d8931c2b43b089a091826c1ec"} Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.324555 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.331159 4941 generic.go:334] "Generic (PLEG): container finished" podID="c08bb8f0-daa5-4321-8e42-bf2713833cbf" containerID="2c3d7dc00d9636d871b92937c391e1fe53cfe7256b79a79c00f7b0ed2afdc5b4" exitCode=0 Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.331227 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6f8e-account-create-update-n2v6n" event={"ID":"c08bb8f0-daa5-4321-8e42-bf2713833cbf","Type":"ContainerDied","Data":"2c3d7dc00d9636d871b92937c391e1fe53cfe7256b79a79c00f7b0ed2afdc5b4"} Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.335681 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"322fb449-5599-45af-97e2-158692366d9b","Type":"ContainerStarted","Data":"a27920a1da68143556f6bb75d8290f2b7b6ff656559929b9f3e93d0181c13e92"} Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.356195 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr" podStartSLOduration=3.640650994 podStartE2EDuration="4.35617861s" podCreationTimestamp="2025-11-30 07:04:15 +0000 UTC" firstStartedPulling="2025-11-30 07:04:16.350408769 +0000 UTC m=+1077.118580378" lastFinishedPulling="2025-11-30 07:04:17.065936385 +0000 UTC m=+1077.834107994" observedRunningTime="2025-11-30 07:04:19.354014012 +0000 UTC m=+1080.122185621" watchObservedRunningTime="2025-11-30 07:04:19.35617861 +0000 UTC m=+1080.124350209" Nov 30 07:04:19 crc kubenswrapper[4941]: I1130 07:04:19.409265 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" podStartSLOduration=3.527011 podStartE2EDuration="4.409243701s" podCreationTimestamp="2025-11-30 07:04:15 +0000 UTC" firstStartedPulling="2025-11-30 07:04:16.692303733 +0000 UTC m=+1077.460475342" lastFinishedPulling="2025-11-30 07:04:17.574536434 +0000 UTC m=+1078.342708043" observedRunningTime="2025-11-30 07:04:19.407018411 +0000 UTC m=+1080.175190020" watchObservedRunningTime="2025-11-30 07:04:19.409243701 +0000 UTC m=+1080.177415300" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.350548 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"322fb449-5599-45af-97e2-158692366d9b","Type":"ContainerStarted","Data":"eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274"} Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.351505 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.353670 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c05c5cf3-bcb4-4307-a601-fbecde4f026b","Type":"ContainerStarted","Data":"c8dea3d901536f92143b2d0853186bb74f8eb40c02c82dcf71d84565f2e4dbc7"} Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.374903 4941 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.11024404 podStartE2EDuration="3.374871765s" podCreationTimestamp="2025-11-30 07:04:17 +0000 UTC" firstStartedPulling="2025-11-30 07:04:18.613444278 +0000 UTC m=+1079.381615887" lastFinishedPulling="2025-11-30 07:04:19.878071993 +0000 UTC m=+1080.646243612" observedRunningTime="2025-11-30 07:04:20.371354706 +0000 UTC m=+1081.139526355" watchObservedRunningTime="2025-11-30 07:04:20.374871765 +0000 UTC m=+1081.143043384" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.395612 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=3.10443602 podStartE2EDuration="44.395593979s" podCreationTimestamp="2025-11-30 07:03:36 +0000 UTC" firstStartedPulling="2025-11-30 07:03:37.825619505 +0000 UTC m=+1038.593791114" lastFinishedPulling="2025-11-30 07:04:19.116777464 +0000 UTC m=+1079.884949073" observedRunningTime="2025-11-30 07:04:20.394748194 +0000 UTC m=+1081.162919803" watchObservedRunningTime="2025-11-30 07:04:20.395593979 +0000 UTC m=+1081.163765588" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.687140 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pk4mk" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.780161 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a31804e8-764b-460f-bd79-e64ef5b7d06a-operator-scripts\") pod \"a31804e8-764b-460f-bd79-e64ef5b7d06a\" (UID: \"a31804e8-764b-460f-bd79-e64ef5b7d06a\") " Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.780252 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slj6l\" (UniqueName: \"kubernetes.io/projected/a31804e8-764b-460f-bd79-e64ef5b7d06a-kube-api-access-slj6l\") pod \"a31804e8-764b-460f-bd79-e64ef5b7d06a\" (UID: \"a31804e8-764b-460f-bd79-e64ef5b7d06a\") " Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.780840 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31804e8-764b-460f-bd79-e64ef5b7d06a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a31804e8-764b-460f-bd79-e64ef5b7d06a" (UID: "a31804e8-764b-460f-bd79-e64ef5b7d06a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.787294 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31804e8-764b-460f-bd79-e64ef5b7d06a-kube-api-access-slj6l" (OuterVolumeSpecName: "kube-api-access-slj6l") pod "a31804e8-764b-460f-bd79-e64ef5b7d06a" (UID: "a31804e8-764b-460f-bd79-e64ef5b7d06a"). InnerVolumeSpecName "kube-api-access-slj6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.852972 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-6f8e-account-create-update-n2v6n" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.883691 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a31804e8-764b-460f-bd79-e64ef5b7d06a-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.883761 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slj6l\" (UniqueName: \"kubernetes.io/projected/a31804e8-764b-460f-bd79-e64ef5b7d06a-kube-api-access-slj6l\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.890463 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f9c5-account-create-update-r4chg" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.985189 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3-operator-scripts\") pod \"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3\" (UID: \"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3\") " Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.985657 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3" (UID: "6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.985736 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4kfz\" (UniqueName: \"kubernetes.io/projected/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3-kube-api-access-s4kfz\") pod \"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3\" (UID: \"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3\") " Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.985905 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c08bb8f0-daa5-4321-8e42-bf2713833cbf-operator-scripts\") pod \"c08bb8f0-daa5-4321-8e42-bf2713833cbf\" (UID: \"c08bb8f0-daa5-4321-8e42-bf2713833cbf\") " Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.985926 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jk57v\" (UniqueName: \"kubernetes.io/projected/c08bb8f0-daa5-4321-8e42-bf2713833cbf-kube-api-access-jk57v\") pod \"c08bb8f0-daa5-4321-8e42-bf2713833cbf\" (UID: \"c08bb8f0-daa5-4321-8e42-bf2713833cbf\") " Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.986342 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.986347 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c08bb8f0-daa5-4321-8e42-bf2713833cbf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c08bb8f0-daa5-4321-8e42-bf2713833cbf" (UID: "c08bb8f0-daa5-4321-8e42-bf2713833cbf"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.989432 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3-kube-api-access-s4kfz" (OuterVolumeSpecName: "kube-api-access-s4kfz") pod "6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3" (UID: "6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3"). InnerVolumeSpecName "kube-api-access-s4kfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:04:20 crc kubenswrapper[4941]: I1130 07:04:20.990011 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c08bb8f0-daa5-4321-8e42-bf2713833cbf-kube-api-access-jk57v" (OuterVolumeSpecName: "kube-api-access-jk57v") pod "c08bb8f0-daa5-4321-8e42-bf2713833cbf" (UID: "c08bb8f0-daa5-4321-8e42-bf2713833cbf"). InnerVolumeSpecName "kube-api-access-jk57v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.056304 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nn258" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.087805 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c08bb8f0-daa5-4321-8e42-bf2713833cbf-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.087845 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jk57v\" (UniqueName: \"kubernetes.io/projected/c08bb8f0-daa5-4321-8e42-bf2713833cbf-kube-api-access-jk57v\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.087861 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4kfz\" (UniqueName: \"kubernetes.io/projected/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3-kube-api-access-s4kfz\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.189462 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/831541f1-b1bf-4625-b4db-8a2b57d0481e-operator-scripts\") pod \"831541f1-b1bf-4625-b4db-8a2b57d0481e\" (UID: \"831541f1-b1bf-4625-b4db-8a2b57d0481e\") " Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.189693 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrgvc\" (UniqueName: \"kubernetes.io/projected/831541f1-b1bf-4625-b4db-8a2b57d0481e-kube-api-access-mrgvc\") pod \"831541f1-b1bf-4625-b4db-8a2b57d0481e\" (UID: \"831541f1-b1bf-4625-b4db-8a2b57d0481e\") " Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.190167 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/831541f1-b1bf-4625-b4db-8a2b57d0481e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "831541f1-b1bf-4625-b4db-8a2b57d0481e" (UID: "831541f1-b1bf-4625-b4db-8a2b57d0481e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.190276 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/831541f1-b1bf-4625-b4db-8a2b57d0481e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.193609 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831541f1-b1bf-4625-b4db-8a2b57d0481e-kube-api-access-mrgvc" (OuterVolumeSpecName: "kube-api-access-mrgvc") pod "831541f1-b1bf-4625-b4db-8a2b57d0481e" (UID: "831541f1-b1bf-4625-b4db-8a2b57d0481e"). InnerVolumeSpecName "kube-api-access-mrgvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.292346 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrgvc\" (UniqueName: \"kubernetes.io/projected/831541f1-b1bf-4625-b4db-8a2b57d0481e-kube-api-access-mrgvc\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.365140 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f9c5-account-create-update-r4chg" event={"ID":"6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3","Type":"ContainerDied","Data":"d8a0eaa8bd923ea1b73f1c1518326faa59a45219588418a2cec71f390c083273"} Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.365200 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8a0eaa8bd923ea1b73f1c1518326faa59a45219588418a2cec71f390c083273" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.365198 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f9c5-account-create-update-r4chg" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.367789 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nn258" event={"ID":"831541f1-b1bf-4625-b4db-8a2b57d0481e","Type":"ContainerDied","Data":"fdd5e03779e9571b75847d7be5135f2f13dff5f707d7a9ed371a20f0f27e1022"} Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.367845 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fdd5e03779e9571b75847d7be5135f2f13dff5f707d7a9ed371a20f0f27e1022" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.367797 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nn258" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.369810 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pk4mk" event={"ID":"a31804e8-764b-460f-bd79-e64ef5b7d06a","Type":"ContainerDied","Data":"76ac10066bd3ad5bed03042e4163a40a345b0a536de19a7c73e9ad430c65a3f5"} Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.369833 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-pk4mk" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.369842 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76ac10066bd3ad5bed03042e4163a40a345b0a536de19a7c73e9ad430c65a3f5" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.371905 4941 generic.go:334] "Generic (PLEG): container finished" podID="fa90de41-9166-475c-925a-3d79b02a694d" containerID="ef1eef5d4d1e7312fddf801ca1f92d70d30764509fec5b54de267fa957a12e8b" exitCode=0 Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.371964 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fa90de41-9166-475c-925a-3d79b02a694d","Type":"ContainerDied","Data":"ef1eef5d4d1e7312fddf801ca1f92d70d30764509fec5b54de267fa957a12e8b"} Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.377133 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6f8e-account-create-update-n2v6n" event={"ID":"c08bb8f0-daa5-4321-8e42-bf2713833cbf","Type":"ContainerDied","Data":"4627a0553a5f0e66bbcc124a38aed5e7a41719832381e4d92b5c457b6c0e82e1"} Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.377144 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6f8e-account-create-update-n2v6n" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.377166 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4627a0553a5f0e66bbcc124a38aed5e7a41719832381e4d92b5c457b6c0e82e1" Nov 30 07:04:21 crc kubenswrapper[4941]: I1130 07:04:21.387503 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"322fb449-5599-45af-97e2-158692366d9b","Type":"ContainerStarted","Data":"580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283"} Nov 30 07:04:22 crc kubenswrapper[4941]: I1130 07:04:22.138550 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Nov 30 07:04:22 crc kubenswrapper[4941]: I1130 07:04:22.402409 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fa90de41-9166-475c-925a-3d79b02a694d","Type":"ContainerStarted","Data":"5c6f3a2a19c5ce5cd94327bc758bf13a86b7ae51c23f1c360ae9657455e0503b"} Nov 30 07:04:22 crc kubenswrapper[4941]: I1130 07:04:22.423852 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=-9223371988.43094 podStartE2EDuration="48.423835286s" podCreationTimestamp="2025-11-30 07:03:34 +0000 UTC" firstStartedPulling="2025-11-30 07:03:36.985519534 +0000 UTC m=+1037.753691143" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:22.422018989 +0000 UTC m=+1083.190190608" watchObservedRunningTime="2025-11-30 07:04:22.423835286 +0000 UTC m=+1083.192006895" Nov 30 07:04:25 crc kubenswrapper[4941]: I1130 07:04:25.843953 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr" Nov 30 07:04:26 crc kubenswrapper[4941]: I1130 07:04:26.268511 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:26 crc kubenswrapper[4941]: I1130 07:04:26.294968 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Nov 30 07:04:26 crc kubenswrapper[4941]: I1130 
07:04:26.295211 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Nov 30 07:04:26 crc kubenswrapper[4941]: I1130 07:04:26.317260 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b7ccdcb4f-576sr"] Nov 30 07:04:26 crc kubenswrapper[4941]: I1130 07:04:26.401701 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Nov 30 07:04:26 crc kubenswrapper[4941]: I1130 07:04:26.433158 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr" podUID="a4f2b928-a1f9-4a85-a094-ea6c19e7c110" containerName="dnsmasq-dns" containerID="cri-o://6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c" gracePeriod=10 Nov 30 07:04:26 crc kubenswrapper[4941]: I1130 07:04:26.502457 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Nov 30 07:04:26 crc kubenswrapper[4941]: I1130 07:04:26.919028 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.002528 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-config\") pod \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.002592 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wf9gd\" (UniqueName: \"kubernetes.io/projected/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-kube-api-access-wf9gd\") pod \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.002665 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-dns-svc\") pod \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.002721 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-ovsdbserver-sb\") pod \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\" (UID: \"a4f2b928-a1f9-4a85-a094-ea6c19e7c110\") " Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.018904 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-kube-api-access-wf9gd" (OuterVolumeSpecName: "kube-api-access-wf9gd") pod "a4f2b928-a1f9-4a85-a094-ea6c19e7c110" (UID: "a4f2b928-a1f9-4a85-a094-ea6c19e7c110"). InnerVolumeSpecName "kube-api-access-wf9gd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.058950 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a4f2b928-a1f9-4a85-a094-ea6c19e7c110" (UID: "a4f2b928-a1f9-4a85-a094-ea6c19e7c110"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.060270 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a4f2b928-a1f9-4a85-a094-ea6c19e7c110" (UID: "a4f2b928-a1f9-4a85-a094-ea6c19e7c110"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.076610 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-config" (OuterVolumeSpecName: "config") pod "a4f2b928-a1f9-4a85-a094-ea6c19e7c110" (UID: "a4f2b928-a1f9-4a85-a094-ea6c19e7c110"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.104704 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.104742 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wf9gd\" (UniqueName: \"kubernetes.io/projected/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-kube-api-access-wf9gd\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.104755 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.104766 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a4f2b928-a1f9-4a85-a094-ea6c19e7c110-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.139470 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.403154 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-s2h5c"] Nov 30 07:04:27 crc kubenswrapper[4941]: E1130 07:04:27.403623 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="831541f1-b1bf-4625-b4db-8a2b57d0481e" containerName="mariadb-database-create" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.403669 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="831541f1-b1bf-4625-b4db-8a2b57d0481e" containerName="mariadb-database-create" Nov 30 07:04:27 crc kubenswrapper[4941]: E1130 07:04:27.403690 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3" containerName="mariadb-account-create-update" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.403700 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3" containerName="mariadb-account-create-update" Nov 30 07:04:27 crc kubenswrapper[4941]: E1130 07:04:27.403722 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4f2b928-a1f9-4a85-a094-ea6c19e7c110" containerName="dnsmasq-dns" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.403730 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4f2b928-a1f9-4a85-a094-ea6c19e7c110" containerName="dnsmasq-dns" Nov 30 07:04:27 crc kubenswrapper[4941]: E1130 
07:04:27.403751 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c08bb8f0-daa5-4321-8e42-bf2713833cbf" containerName="mariadb-account-create-update" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.403761 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c08bb8f0-daa5-4321-8e42-bf2713833cbf" containerName="mariadb-account-create-update" Nov 30 07:04:27 crc kubenswrapper[4941]: E1130 07:04:27.403770 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4f2b928-a1f9-4a85-a094-ea6c19e7c110" containerName="init" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.403777 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4f2b928-a1f9-4a85-a094-ea6c19e7c110" containerName="init" Nov 30 07:04:27 crc kubenswrapper[4941]: E1130 07:04:27.403790 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a31804e8-764b-460f-bd79-e64ef5b7d06a" containerName="mariadb-database-create" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.403799 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a31804e8-764b-460f-bd79-e64ef5b7d06a" containerName="mariadb-database-create" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.403989 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4f2b928-a1f9-4a85-a094-ea6c19e7c110" containerName="dnsmasq-dns" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.404005 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a31804e8-764b-460f-bd79-e64ef5b7d06a" containerName="mariadb-database-create" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.404013 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3" containerName="mariadb-account-create-update" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.404031 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="831541f1-b1bf-4625-b4db-8a2b57d0481e" containerName="mariadb-database-create" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.404043 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="c08bb8f0-daa5-4321-8e42-bf2713833cbf" containerName="mariadb-account-create-update" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.404719 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-s2h5c" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.411250 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-s2h5c"] Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.442354 4941 generic.go:334] "Generic (PLEG): container finished" podID="a4f2b928-a1f9-4a85-a094-ea6c19e7c110" containerID="6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c" exitCode=0 Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.442409 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.442443 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr" event={"ID":"a4f2b928-a1f9-4a85-a094-ea6c19e7c110","Type":"ContainerDied","Data":"6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c"} Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.442490 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b7ccdcb4f-576sr" event={"ID":"a4f2b928-a1f9-4a85-a094-ea6c19e7c110","Type":"ContainerDied","Data":"e705a04d71c5f9919e0e61a54c449cf5306f422e65c4b7c9f7ef36070d791c11"} Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.442510 4941 scope.go:117] "RemoveContainer" containerID="6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.474097 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b7ccdcb4f-576sr"] Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.481752 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b7ccdcb4f-576sr"] Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.481940 4941 scope.go:117] "RemoveContainer" containerID="4f6c4875c73302c4d1b7a31174ebf1b3d340d24c9d887fb25eb54b096ebb75ea" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.491132 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-c0c5-account-create-update-d484t"] Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.495338 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-c0c5-account-create-update-d484t" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.498900 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.510232 4941 scope.go:117] "RemoveContainer" containerID="6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c" Nov 30 07:04:27 crc kubenswrapper[4941]: E1130 07:04:27.512210 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c\": container with ID starting with 6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c not found: ID does not exist" containerID="6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.512292 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c"} err="failed to get container status \"6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c\": rpc error: code = NotFound desc = could not find container \"6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c\": container with ID starting with 6fa3a3187034da4b5301573d17c6edd8addda7a3e1bf04bb6e8a49d92bac909c not found: ID does not exist" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.512354 4941 scope.go:117] "RemoveContainer" containerID="4f6c4875c73302c4d1b7a31174ebf1b3d340d24c9d887fb25eb54b096ebb75ea" Nov 30 07:04:27 crc kubenswrapper[4941]: E1130 07:04:27.513080 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"4f6c4875c73302c4d1b7a31174ebf1b3d340d24c9d887fb25eb54b096ebb75ea\": container with ID starting with 4f6c4875c73302c4d1b7a31174ebf1b3d340d24c9d887fb25eb54b096ebb75ea not found: ID does not exist" containerID="4f6c4875c73302c4d1b7a31174ebf1b3d340d24c9d887fb25eb54b096ebb75ea" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.513174 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f6c4875c73302c4d1b7a31174ebf1b3d340d24c9d887fb25eb54b096ebb75ea"} err="failed to get container status \"4f6c4875c73302c4d1b7a31174ebf1b3d340d24c9d887fb25eb54b096ebb75ea\": rpc error: code = NotFound desc = could not find container \"4f6c4875c73302c4d1b7a31174ebf1b3d340d24c9d887fb25eb54b096ebb75ea\": container with ID starting with 4f6c4875c73302c4d1b7a31174ebf1b3d340d24c9d887fb25eb54b096ebb75ea not found: ID does not exist" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.520355 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-c0c5-account-create-update-d484t"] Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.532570 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc8n2\" (UniqueName: \"kubernetes.io/projected/61013b6e-d5e6-4867-b2b9-e88c540ad362-kube-api-access-lc8n2\") pod \"glance-db-create-s2h5c\" (UID: \"61013b6e-d5e6-4867-b2b9-e88c540ad362\") " pod="openstack/glance-db-create-s2h5c" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.534398 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61013b6e-d5e6-4867-b2b9-e88c540ad362-operator-scripts\") pod \"glance-db-create-s2h5c\" (UID: \"61013b6e-d5e6-4867-b2b9-e88c540ad362\") " pod="openstack/glance-db-create-s2h5c" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.539248 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4f2b928-a1f9-4a85-a094-ea6c19e7c110" path="/var/lib/kubelet/pods/a4f2b928-a1f9-4a85-a094-ea6c19e7c110/volumes" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.637007 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61013b6e-d5e6-4867-b2b9-e88c540ad362-operator-scripts\") pod \"glance-db-create-s2h5c\" (UID: \"61013b6e-d5e6-4867-b2b9-e88c540ad362\") " pod="openstack/glance-db-create-s2h5c" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.637269 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdn94\" (UniqueName: \"kubernetes.io/projected/c8af3337-ba7b-41d6-915b-d9eeb7443354-kube-api-access-pdn94\") pod \"glance-c0c5-account-create-update-d484t\" (UID: \"c8af3337-ba7b-41d6-915b-d9eeb7443354\") " pod="openstack/glance-c0c5-account-create-update-d484t" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.637384 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8af3337-ba7b-41d6-915b-d9eeb7443354-operator-scripts\") pod \"glance-c0c5-account-create-update-d484t\" (UID: \"c8af3337-ba7b-41d6-915b-d9eeb7443354\") " pod="openstack/glance-c0c5-account-create-update-d484t" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.637437 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc8n2\" (UniqueName: 
\"kubernetes.io/projected/61013b6e-d5e6-4867-b2b9-e88c540ad362-kube-api-access-lc8n2\") pod \"glance-db-create-s2h5c\" (UID: \"61013b6e-d5e6-4867-b2b9-e88c540ad362\") " pod="openstack/glance-db-create-s2h5c" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.638399 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61013b6e-d5e6-4867-b2b9-e88c540ad362-operator-scripts\") pod \"glance-db-create-s2h5c\" (UID: \"61013b6e-d5e6-4867-b2b9-e88c540ad362\") " pod="openstack/glance-db-create-s2h5c" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.658622 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc8n2\" (UniqueName: \"kubernetes.io/projected/61013b6e-d5e6-4867-b2b9-e88c540ad362-kube-api-access-lc8n2\") pod \"glance-db-create-s2h5c\" (UID: \"61013b6e-d5e6-4867-b2b9-e88c540ad362\") " pod="openstack/glance-db-create-s2h5c" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.722569 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-s2h5c" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.739917 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdn94\" (UniqueName: \"kubernetes.io/projected/c8af3337-ba7b-41d6-915b-d9eeb7443354-kube-api-access-pdn94\") pod \"glance-c0c5-account-create-update-d484t\" (UID: \"c8af3337-ba7b-41d6-915b-d9eeb7443354\") " pod="openstack/glance-c0c5-account-create-update-d484t" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.739983 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8af3337-ba7b-41d6-915b-d9eeb7443354-operator-scripts\") pod \"glance-c0c5-account-create-update-d484t\" (UID: \"c8af3337-ba7b-41d6-915b-d9eeb7443354\") " pod="openstack/glance-c0c5-account-create-update-d484t" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.740819 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8af3337-ba7b-41d6-915b-d9eeb7443354-operator-scripts\") pod \"glance-c0c5-account-create-update-d484t\" (UID: \"c8af3337-ba7b-41d6-915b-d9eeb7443354\") " pod="openstack/glance-c0c5-account-create-update-d484t" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.764326 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdn94\" (UniqueName: \"kubernetes.io/projected/c8af3337-ba7b-41d6-915b-d9eeb7443354-kube-api-access-pdn94\") pod \"glance-c0c5-account-create-update-d484t\" (UID: \"c8af3337-ba7b-41d6-915b-d9eeb7443354\") " pod="openstack/glance-c0c5-account-create-update-d484t" Nov 30 07:04:27 crc kubenswrapper[4941]: I1130 07:04:27.826766 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-c0c5-account-create-update-d484t" Nov 30 07:04:28 crc kubenswrapper[4941]: I1130 07:04:28.181400 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-s2h5c"] Nov 30 07:04:28 crc kubenswrapper[4941]: W1130 07:04:28.187041 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61013b6e_d5e6_4867_b2b9_e88c540ad362.slice/crio-be754e5908b77188a9c22d188267f1b60976d4bcbbfa788ef070c8e6fdb7ebb2 WatchSource:0}: Error finding container be754e5908b77188a9c22d188267f1b60976d4bcbbfa788ef070c8e6fdb7ebb2: Status 404 returned error can't find the container with id be754e5908b77188a9c22d188267f1b60976d4bcbbfa788ef070c8e6fdb7ebb2 Nov 30 07:04:28 crc kubenswrapper[4941]: I1130 07:04:28.307949 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-c0c5-account-create-update-d484t"] Nov 30 07:04:28 crc kubenswrapper[4941]: W1130 07:04:28.316592 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc8af3337_ba7b_41d6_915b_d9eeb7443354.slice/crio-759776e7bbc6e629511746c605876c74e3e87fddfc679b2ecc8914869e130b48 WatchSource:0}: Error finding container 759776e7bbc6e629511746c605876c74e3e87fddfc679b2ecc8914869e130b48: Status 404 returned error can't find the container with id 759776e7bbc6e629511746c605876c74e3e87fddfc679b2ecc8914869e130b48 Nov 30 07:04:28 crc kubenswrapper[4941]: I1130 07:04:28.450235 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c0c5-account-create-update-d484t" event={"ID":"c8af3337-ba7b-41d6-915b-d9eeb7443354","Type":"ContainerStarted","Data":"acede44e91df8abe2ec238513573f98b392bd74f31e0181fc783a56bd22a6f4e"} Nov 30 07:04:28 crc kubenswrapper[4941]: I1130 07:04:28.450278 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c0c5-account-create-update-d484t" event={"ID":"c8af3337-ba7b-41d6-915b-d9eeb7443354","Type":"ContainerStarted","Data":"759776e7bbc6e629511746c605876c74e3e87fddfc679b2ecc8914869e130b48"} Nov 30 07:04:28 crc kubenswrapper[4941]: I1130 07:04:28.454887 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-s2h5c" event={"ID":"61013b6e-d5e6-4867-b2b9-e88c540ad362","Type":"ContainerStarted","Data":"8ddf483285cd2430d6cec8c7625c079637a8728f27cdd14f85211a9e2e621640"} Nov 30 07:04:28 crc kubenswrapper[4941]: I1130 07:04:28.455018 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-s2h5c" event={"ID":"61013b6e-d5e6-4867-b2b9-e88c540ad362","Type":"ContainerStarted","Data":"be754e5908b77188a9c22d188267f1b60976d4bcbbfa788ef070c8e6fdb7ebb2"} Nov 30 07:04:28 crc kubenswrapper[4941]: I1130 07:04:28.472367 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-c0c5-account-create-update-d484t" podStartSLOduration=1.4723156849999999 podStartE2EDuration="1.472315685s" podCreationTimestamp="2025-11-30 07:04:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:28.465937757 +0000 UTC m=+1089.234109366" watchObservedRunningTime="2025-11-30 07:04:28.472315685 +0000 UTC m=+1089.240487294" Nov 30 07:04:28 crc kubenswrapper[4941]: I1130 07:04:28.499645 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-s2h5c" 
podStartSLOduration=1.499627145 podStartE2EDuration="1.499627145s" podCreationTimestamp="2025-11-30 07:04:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:28.493960499 +0000 UTC m=+1089.262132108" watchObservedRunningTime="2025-11-30 07:04:28.499627145 +0000 UTC m=+1089.267798754" Nov 30 07:04:28 crc kubenswrapper[4941]: I1130 07:04:28.907483 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f6d79597f-wg6p7"] Nov 30 07:04:28 crc kubenswrapper[4941]: I1130 07:04:28.909400 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:28 crc kubenswrapper[4941]: I1130 07:04:28.929930 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f6d79597f-wg6p7"] Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.063466 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-dns-svc\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.063561 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-ovsdbserver-nb\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.063819 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-config\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.063886 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-ovsdbserver-sb\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.063932 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jp85m\" (UniqueName: \"kubernetes.io/projected/f2064ce3-0785-4fa0-913d-c24f55138a87-kube-api-access-jp85m\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.165499 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-dns-svc\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.165607 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-ovsdbserver-nb\") 
pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.165666 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-config\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.165697 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-ovsdbserver-sb\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.165717 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jp85m\" (UniqueName: \"kubernetes.io/projected/f2064ce3-0785-4fa0-913d-c24f55138a87-kube-api-access-jp85m\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.166562 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-dns-svc\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.166798 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-config\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.167167 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-ovsdbserver-nb\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.167242 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-ovsdbserver-sb\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.187638 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jp85m\" (UniqueName: \"kubernetes.io/projected/f2064ce3-0785-4fa0-913d-c24f55138a87-kube-api-access-jp85m\") pod \"dnsmasq-dns-5f6d79597f-wg6p7\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.227706 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.464803 4941 generic.go:334] "Generic (PLEG): container finished" podID="c8af3337-ba7b-41d6-915b-d9eeb7443354" containerID="acede44e91df8abe2ec238513573f98b392bd74f31e0181fc783a56bd22a6f4e" exitCode=0 Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.464881 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c0c5-account-create-update-d484t" event={"ID":"c8af3337-ba7b-41d6-915b-d9eeb7443354","Type":"ContainerDied","Data":"acede44e91df8abe2ec238513573f98b392bd74f31e0181fc783a56bd22a6f4e"} Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.467606 4941 generic.go:334] "Generic (PLEG): container finished" podID="61013b6e-d5e6-4867-b2b9-e88c540ad362" containerID="8ddf483285cd2430d6cec8c7625c079637a8728f27cdd14f85211a9e2e621640" exitCode=0 Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.467641 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-s2h5c" event={"ID":"61013b6e-d5e6-4867-b2b9-e88c540ad362","Type":"ContainerDied","Data":"8ddf483285cd2430d6cec8c7625c079637a8728f27cdd14f85211a9e2e621640"} Nov 30 07:04:29 crc kubenswrapper[4941]: I1130 07:04:29.861399 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f6d79597f-wg6p7"] Nov 30 07:04:29 crc kubenswrapper[4941]: W1130 07:04:29.868462 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2064ce3_0785_4fa0_913d_c24f55138a87.slice/crio-ea6bf0336014c20654ca9d472bdad10e9bf6000e427a8f7d44092e24c37e7272 WatchSource:0}: Error finding container ea6bf0336014c20654ca9d472bdad10e9bf6000e427a8f7d44092e24c37e7272: Status 404 returned error can't find the container with id ea6bf0336014c20654ca9d472bdad10e9bf6000e427a8f7d44092e24c37e7272 Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.044772 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.050077 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.052198 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.052815 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.052965 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-tsqcw" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.053077 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.080672 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.184618 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.185235 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.185367 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-cache\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.185520 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-lock\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.185558 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhs6p\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-kube-api-access-mhs6p\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.287900 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-lock\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.288363 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhs6p\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-kube-api-access-mhs6p\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.288398 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.288430 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.288446 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-lock\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.288477 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-cache\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: E1130 07:04:30.288630 4941 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 30 07:04:30 crc kubenswrapper[4941]: E1130 07:04:30.288645 4941 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 30 07:04:30 crc kubenswrapper[4941]: E1130 07:04:30.288696 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift podName:e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca nodeName:}" failed. No retries permitted until 2025-11-30 07:04:30.788674681 +0000 UTC m=+1091.556846290 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift") pod "swift-storage-0" (UID: "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca") : configmap "swift-ring-files" not found Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.288962 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-cache\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.288940 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.313101 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhs6p\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-kube-api-access-mhs6p\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.320636 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.479804 4941 generic.go:334] "Generic (PLEG): container finished" podID="f2064ce3-0785-4fa0-913d-c24f55138a87" containerID="72615f93172b5338c28e3b495d4060d37ace32a7acd2283df14a81f08bdd28ed" exitCode=0 Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.479930 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" event={"ID":"f2064ce3-0785-4fa0-913d-c24f55138a87","Type":"ContainerDied","Data":"72615f93172b5338c28e3b495d4060d37ace32a7acd2283df14a81f08bdd28ed"} Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.479964 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" event={"ID":"f2064ce3-0785-4fa0-913d-c24f55138a87","Type":"ContainerStarted","Data":"ea6bf0336014c20654ca9d472bdad10e9bf6000e427a8f7d44092e24c37e7272"} Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.551460 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-lknbl"] Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.553930 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.561814 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.562137 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.570894 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.575023 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-lknbl"] Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.697891 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9q27b\" (UniqueName: \"kubernetes.io/projected/2c1461b3-8d1d-4812-a241-ad0a1a962c35-kube-api-access-9q27b\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.697953 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-swiftconf\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.698007 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2c1461b3-8d1d-4812-a241-ad0a1a962c35-etc-swift\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.698088 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2c1461b3-8d1d-4812-a241-ad0a1a962c35-scripts\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.698137 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-combined-ca-bundle\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.698200 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2c1461b3-8d1d-4812-a241-ad0a1a962c35-ring-data-devices\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.698243 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-dispersionconf\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 
07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.779857 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-s2h5c" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.799470 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9q27b\" (UniqueName: \"kubernetes.io/projected/2c1461b3-8d1d-4812-a241-ad0a1a962c35-kube-api-access-9q27b\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.799519 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-swiftconf\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.799563 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2c1461b3-8d1d-4812-a241-ad0a1a962c35-etc-swift\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.799718 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2c1461b3-8d1d-4812-a241-ad0a1a962c35-scripts\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.799769 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.799793 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-combined-ca-bundle\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.799843 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2c1461b3-8d1d-4812-a241-ad0a1a962c35-ring-data-devices\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.799875 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-dispersionconf\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.802433 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2c1461b3-8d1d-4812-a241-ad0a1a962c35-scripts\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " 
pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.803008 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2c1461b3-8d1d-4812-a241-ad0a1a962c35-etc-swift\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.804297 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2c1461b3-8d1d-4812-a241-ad0a1a962c35-ring-data-devices\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: E1130 07:04:30.804434 4941 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 30 07:04:30 crc kubenswrapper[4941]: E1130 07:04:30.804455 4941 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 30 07:04:30 crc kubenswrapper[4941]: E1130 07:04:30.804493 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift podName:e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca nodeName:}" failed. No retries permitted until 2025-11-30 07:04:31.804478524 +0000 UTC m=+1092.572650133 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift") pod "swift-storage-0" (UID: "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca") : configmap "swift-ring-files" not found Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.804628 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-dispersionconf\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.806990 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-swiftconf\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.807462 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-c0c5-account-create-update-d484t" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.811115 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-combined-ca-bundle\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.833127 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9q27b\" (UniqueName: \"kubernetes.io/projected/2c1461b3-8d1d-4812-a241-ad0a1a962c35-kube-api-access-9q27b\") pod \"swift-ring-rebalance-lknbl\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.887774 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.900968 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61013b6e-d5e6-4867-b2b9-e88c540ad362-operator-scripts\") pod \"61013b6e-d5e6-4867-b2b9-e88c540ad362\" (UID: \"61013b6e-d5e6-4867-b2b9-e88c540ad362\") " Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.901173 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdn94\" (UniqueName: \"kubernetes.io/projected/c8af3337-ba7b-41d6-915b-d9eeb7443354-kube-api-access-pdn94\") pod \"c8af3337-ba7b-41d6-915b-d9eeb7443354\" (UID: \"c8af3337-ba7b-41d6-915b-d9eeb7443354\") " Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.901238 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lc8n2\" (UniqueName: \"kubernetes.io/projected/61013b6e-d5e6-4867-b2b9-e88c540ad362-kube-api-access-lc8n2\") pod \"61013b6e-d5e6-4867-b2b9-e88c540ad362\" (UID: \"61013b6e-d5e6-4867-b2b9-e88c540ad362\") " Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.901270 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8af3337-ba7b-41d6-915b-d9eeb7443354-operator-scripts\") pod \"c8af3337-ba7b-41d6-915b-d9eeb7443354\" (UID: \"c8af3337-ba7b-41d6-915b-d9eeb7443354\") " Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.902212 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8af3337-ba7b-41d6-915b-d9eeb7443354-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c8af3337-ba7b-41d6-915b-d9eeb7443354" (UID: "c8af3337-ba7b-41d6-915b-d9eeb7443354"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.902602 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61013b6e-d5e6-4867-b2b9-e88c540ad362-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "61013b6e-d5e6-4867-b2b9-e88c540ad362" (UID: "61013b6e-d5e6-4867-b2b9-e88c540ad362"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.906214 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8af3337-ba7b-41d6-915b-d9eeb7443354-kube-api-access-pdn94" (OuterVolumeSpecName: "kube-api-access-pdn94") pod "c8af3337-ba7b-41d6-915b-d9eeb7443354" (UID: "c8af3337-ba7b-41d6-915b-d9eeb7443354"). InnerVolumeSpecName "kube-api-access-pdn94". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:04:30 crc kubenswrapper[4941]: I1130 07:04:30.906312 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61013b6e-d5e6-4867-b2b9-e88c540ad362-kube-api-access-lc8n2" (OuterVolumeSpecName: "kube-api-access-lc8n2") pod "61013b6e-d5e6-4867-b2b9-e88c540ad362" (UID: "61013b6e-d5e6-4867-b2b9-e88c540ad362"). InnerVolumeSpecName "kube-api-access-lc8n2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.002722 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdn94\" (UniqueName: \"kubernetes.io/projected/c8af3337-ba7b-41d6-915b-d9eeb7443354-kube-api-access-pdn94\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.002754 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lc8n2\" (UniqueName: \"kubernetes.io/projected/61013b6e-d5e6-4867-b2b9-e88c540ad362-kube-api-access-lc8n2\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.002767 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8af3337-ba7b-41d6-915b-d9eeb7443354-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.002780 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61013b6e-d5e6-4867-b2b9-e88c540ad362-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.341743 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-lknbl"] Nov 30 07:04:31 crc kubenswrapper[4941]: W1130 07:04:31.356555 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c1461b3_8d1d_4812_a241_ad0a1a962c35.slice/crio-9af43fdb558e599eec9ac92538616ee00f993aec7ff25db2150ff16a47abeb2f WatchSource:0}: Error finding container 9af43fdb558e599eec9ac92538616ee00f993aec7ff25db2150ff16a47abeb2f: Status 404 returned error can't find the container with id 9af43fdb558e599eec9ac92538616ee00f993aec7ff25db2150ff16a47abeb2f Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.490050 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" event={"ID":"f2064ce3-0785-4fa0-913d-c24f55138a87","Type":"ContainerStarted","Data":"cfa64cfa6ce7ea072fbcdd1db81afa8d60832fe8d0ea99c2a0036c7e0e231ce5"} Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.490205 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.493398 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-c0c5-account-create-update-d484t" Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.493414 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-c0c5-account-create-update-d484t" event={"ID":"c8af3337-ba7b-41d6-915b-d9eeb7443354","Type":"ContainerDied","Data":"759776e7bbc6e629511746c605876c74e3e87fddfc679b2ecc8914869e130b48"} Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.493452 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="759776e7bbc6e629511746c605876c74e3e87fddfc679b2ecc8914869e130b48" Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.495035 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-s2h5c" Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.495043 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-s2h5c" event={"ID":"61013b6e-d5e6-4867-b2b9-e88c540ad362","Type":"ContainerDied","Data":"be754e5908b77188a9c22d188267f1b60976d4bcbbfa788ef070c8e6fdb7ebb2"} Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.495072 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be754e5908b77188a9c22d188267f1b60976d4bcbbfa788ef070c8e6fdb7ebb2" Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.497114 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-lknbl" event={"ID":"2c1461b3-8d1d-4812-a241-ad0a1a962c35","Type":"ContainerStarted","Data":"9af43fdb558e599eec9ac92538616ee00f993aec7ff25db2150ff16a47abeb2f"} Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.520339 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" podStartSLOduration=3.520310569 podStartE2EDuration="3.520310569s" podCreationTimestamp="2025-11-30 07:04:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:31.5123394 +0000 UTC m=+1092.280511019" watchObservedRunningTime="2025-11-30 07:04:31.520310569 +0000 UTC m=+1092.288482178" Nov 30 07:04:31 crc kubenswrapper[4941]: I1130 07:04:31.816615 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:31 crc kubenswrapper[4941]: E1130 07:04:31.816832 4941 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 30 07:04:31 crc kubenswrapper[4941]: E1130 07:04:31.817207 4941 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 30 07:04:31 crc kubenswrapper[4941]: E1130 07:04:31.817286 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift podName:e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca nodeName:}" failed. No retries permitted until 2025-11-30 07:04:33.817263406 +0000 UTC m=+1094.585435025 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift") pod "swift-storage-0" (UID: "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca") : configmap "swift-ring-files" not found Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.691663 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-rskk6"] Nov 30 07:04:32 crc kubenswrapper[4941]: E1130 07:04:32.692078 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61013b6e-d5e6-4867-b2b9-e88c540ad362" containerName="mariadb-database-create" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.692095 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="61013b6e-d5e6-4867-b2b9-e88c540ad362" containerName="mariadb-database-create" Nov 30 07:04:32 crc kubenswrapper[4941]: E1130 07:04:32.692111 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8af3337-ba7b-41d6-915b-d9eeb7443354" containerName="mariadb-account-create-update" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.692119 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8af3337-ba7b-41d6-915b-d9eeb7443354" containerName="mariadb-account-create-update" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.692294 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8af3337-ba7b-41d6-915b-d9eeb7443354" containerName="mariadb-account-create-update" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.692310 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="61013b6e-d5e6-4867-b2b9-e88c540ad362" containerName="mariadb-database-create" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.692948 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.695398 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-vv8hz" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.700697 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.701667 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-rskk6"] Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.840470 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-config-data\") pod \"glance-db-sync-rskk6\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.841041 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-db-sync-config-data\") pod \"glance-db-sync-rskk6\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.841138 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-combined-ca-bundle\") pod \"glance-db-sync-rskk6\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.841155 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh6x2\" (UniqueName: \"kubernetes.io/projected/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-kube-api-access-hh6x2\") pod \"glance-db-sync-rskk6\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.942856 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-combined-ca-bundle\") pod \"glance-db-sync-rskk6\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.942916 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh6x2\" (UniqueName: \"kubernetes.io/projected/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-kube-api-access-hh6x2\") pod \"glance-db-sync-rskk6\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.943094 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-config-data\") pod \"glance-db-sync-rskk6\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.943147 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-db-sync-config-data\") pod 
\"glance-db-sync-rskk6\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.951073 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-config-data\") pod \"glance-db-sync-rskk6\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.951114 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-combined-ca-bundle\") pod \"glance-db-sync-rskk6\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.959514 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-db-sync-config-data\") pod \"glance-db-sync-rskk6\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:32 crc kubenswrapper[4941]: I1130 07:04:32.963839 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh6x2\" (UniqueName: \"kubernetes.io/projected/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-kube-api-access-hh6x2\") pod \"glance-db-sync-rskk6\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:33 crc kubenswrapper[4941]: I1130 07:04:33.042246 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rskk6" Nov 30 07:04:33 crc kubenswrapper[4941]: I1130 07:04:33.094703 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Nov 30 07:04:33 crc kubenswrapper[4941]: I1130 07:04:33.863464 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:33 crc kubenswrapper[4941]: E1130 07:04:33.863721 4941 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 30 07:04:33 crc kubenswrapper[4941]: E1130 07:04:33.863988 4941 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 30 07:04:33 crc kubenswrapper[4941]: E1130 07:04:33.864046 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift podName:e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca nodeName:}" failed. No retries permitted until 2025-11-30 07:04:37.864030537 +0000 UTC m=+1098.632202146 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift") pod "swift-storage-0" (UID: "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca") : configmap "swift-ring-files" not found Nov 30 07:04:35 crc kubenswrapper[4941]: I1130 07:04:35.533851 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-lknbl" event={"ID":"2c1461b3-8d1d-4812-a241-ad0a1a962c35","Type":"ContainerStarted","Data":"264d6282f2a198345fd9841d0e85c31aa2cff83866d86f1cfa4155e9d969338d"} Nov 30 07:04:35 crc kubenswrapper[4941]: I1130 07:04:35.556969 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-lknbl" podStartSLOduration=1.639905679 podStartE2EDuration="5.556948753s" podCreationTimestamp="2025-11-30 07:04:30 +0000 UTC" firstStartedPulling="2025-11-30 07:04:31.359102685 +0000 UTC m=+1092.127274294" lastFinishedPulling="2025-11-30 07:04:35.276145759 +0000 UTC m=+1096.044317368" observedRunningTime="2025-11-30 07:04:35.555920451 +0000 UTC m=+1096.324092060" watchObservedRunningTime="2025-11-30 07:04:35.556948753 +0000 UTC m=+1096.325120362" Nov 30 07:04:35 crc kubenswrapper[4941]: I1130 07:04:35.777883 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-rskk6"] Nov 30 07:04:35 crc kubenswrapper[4941]: W1130 07:04:35.785617 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e2ae7f7_ed16_40e2_a5ee_638d5f95f0c1.slice/crio-6b2b75b8b910ab32e34ac7b551625344dcce2c572343ee43e69e6d98a36e0518 WatchSource:0}: Error finding container 6b2b75b8b910ab32e34ac7b551625344dcce2c572343ee43e69e6d98a36e0518: Status 404 returned error can't find the container with id 6b2b75b8b910ab32e34ac7b551625344dcce2c572343ee43e69e6d98a36e0518 Nov 30 07:04:36 crc kubenswrapper[4941]: I1130 07:04:36.540489 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rskk6" event={"ID":"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1","Type":"ContainerStarted","Data":"6b2b75b8b910ab32e34ac7b551625344dcce2c572343ee43e69e6d98a36e0518"} Nov 30 07:04:37 crc kubenswrapper[4941]: I1130 07:04:37.939482 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:37 crc kubenswrapper[4941]: E1130 07:04:37.939702 4941 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 30 07:04:37 crc kubenswrapper[4941]: E1130 07:04:37.940006 4941 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 30 07:04:37 crc kubenswrapper[4941]: E1130 07:04:37.940068 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift podName:e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca nodeName:}" failed. No retries permitted until 2025-11-30 07:04:45.940047826 +0000 UTC m=+1106.708219435 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift") pod "swift-storage-0" (UID: "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca") : configmap "swift-ring-files" not found Nov 30 07:04:39 crc kubenswrapper[4941]: I1130 07:04:39.229569 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:04:39 crc kubenswrapper[4941]: I1130 07:04:39.304674 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bd7c66845-d7tlx"] Nov 30 07:04:39 crc kubenswrapper[4941]: I1130 07:04:39.304969 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" podUID="79cf65c4-a135-447d-b8ca-6c219b698395" containerName="dnsmasq-dns" containerID="cri-o://f0e9148b7ea9aceb67086d995581b0c78637918d8931c2b43b089a091826c1ec" gracePeriod=10 Nov 30 07:04:39 crc kubenswrapper[4941]: I1130 07:04:39.566904 4941 generic.go:334] "Generic (PLEG): container finished" podID="45978317-0f07-44da-8b74-fbaaec0e6105" containerID="4167d8a3413d2475510af7736137df718f411d79744780a27c3acc5216161479" exitCode=0 Nov 30 07:04:39 crc kubenswrapper[4941]: I1130 07:04:39.566962 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"45978317-0f07-44da-8b74-fbaaec0e6105","Type":"ContainerDied","Data":"4167d8a3413d2475510af7736137df718f411d79744780a27c3acc5216161479"} Nov 30 07:04:39 crc kubenswrapper[4941]: I1130 07:04:39.570941 4941 generic.go:334] "Generic (PLEG): container finished" podID="79cf65c4-a135-447d-b8ca-6c219b698395" containerID="f0e9148b7ea9aceb67086d995581b0c78637918d8931c2b43b089a091826c1ec" exitCode=0 Nov 30 07:04:39 crc kubenswrapper[4941]: I1130 07:04:39.571025 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" event={"ID":"79cf65c4-a135-447d-b8ca-6c219b698395","Type":"ContainerDied","Data":"f0e9148b7ea9aceb67086d995581b0c78637918d8931c2b43b089a091826c1ec"} Nov 30 07:04:39 crc kubenswrapper[4941]: I1130 07:04:39.583664 4941 generic.go:334] "Generic (PLEG): container finished" podID="4e7a5ee5-1f0c-4819-a375-891a5e2cea03" containerID="0e7be72894db142820110f45d623c5d0708a4455ef259b4e3e8138e248399c22" exitCode=0 Nov 30 07:04:39 crc kubenswrapper[4941]: I1130 07:04:39.583706 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4e7a5ee5-1f0c-4819-a375-891a5e2cea03","Type":"ContainerDied","Data":"0e7be72894db142820110f45d623c5d0708a4455ef259b4e3e8138e248399c22"} Nov 30 07:04:41 crc kubenswrapper[4941]: I1130 07:04:41.605181 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"45978317-0f07-44da-8b74-fbaaec0e6105","Type":"ContainerStarted","Data":"0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08"} Nov 30 07:04:41 crc kubenswrapper[4941]: I1130 07:04:41.607640 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 30 07:04:41 crc kubenswrapper[4941]: I1130 07:04:41.609511 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4e7a5ee5-1f0c-4819-a375-891a5e2cea03","Type":"ContainerStarted","Data":"9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f"} Nov 30 07:04:41 crc kubenswrapper[4941]: I1130 07:04:41.609787 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/rabbitmq-server-0" Nov 30 07:04:41 crc kubenswrapper[4941]: I1130 07:04:41.647508 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=39.654627623 podStartE2EDuration="1m10.647491357s" podCreationTimestamp="2025-11-30 07:03:31 +0000 UTC" firstStartedPulling="2025-11-30 07:03:33.53435602 +0000 UTC m=+1034.302527629" lastFinishedPulling="2025-11-30 07:04:04.527219704 +0000 UTC m=+1065.295391363" observedRunningTime="2025-11-30 07:04:41.643352089 +0000 UTC m=+1102.411523718" watchObservedRunningTime="2025-11-30 07:04:41.647491357 +0000 UTC m=+1102.415662966" Nov 30 07:04:41 crc kubenswrapper[4941]: I1130 07:04:41.687660 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.650680104 podStartE2EDuration="1m9.687631759s" podCreationTimestamp="2025-11-30 07:03:32 +0000 UTC" firstStartedPulling="2025-11-30 07:03:34.437711768 +0000 UTC m=+1035.205883377" lastFinishedPulling="2025-11-30 07:04:05.474663423 +0000 UTC m=+1066.242835032" observedRunningTime="2025-11-30 07:04:41.676084989 +0000 UTC m=+1102.444256588" watchObservedRunningTime="2025-11-30 07:04:41.687631759 +0000 UTC m=+1102.455803378" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.000840 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-zcbz9" podUID="e01076ff-d267-4931-8788-47eee9ebfd76" containerName="ovn-controller" probeResult="failure" output=< Nov 30 07:04:42 crc kubenswrapper[4941]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 30 07:04:42 crc kubenswrapper[4941]: > Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.057546 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.066858 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.293232 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-zcbz9-config-r94t4"] Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.294522 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.296384 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.300948 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zcbz9-config-r94t4"] Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.331367 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-run-ovn\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.331425 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d45646ee-dd08-4c26-bc08-df0abd618f15-additional-scripts\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.331451 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d45646ee-dd08-4c26-bc08-df0abd618f15-scripts\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.331527 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xfnv\" (UniqueName: \"kubernetes.io/projected/d45646ee-dd08-4c26-bc08-df0abd618f15-kube-api-access-8xfnv\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.331547 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-log-ovn\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.331576 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-run\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.432947 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-run\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.433263 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-run-ovn\") pod 
\"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.433357 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-run\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.433441 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-run-ovn\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.433379 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d45646ee-dd08-4c26-bc08-df0abd618f15-additional-scripts\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.433566 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d45646ee-dd08-4c26-bc08-df0abd618f15-scripts\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.433692 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xfnv\" (UniqueName: \"kubernetes.io/projected/d45646ee-dd08-4c26-bc08-df0abd618f15-kube-api-access-8xfnv\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.433780 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-log-ovn\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.433943 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-log-ovn\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.434117 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d45646ee-dd08-4c26-bc08-df0abd618f15-additional-scripts\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.435644 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d45646ee-dd08-4c26-bc08-df0abd618f15-scripts\") pod 
\"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.456865 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xfnv\" (UniqueName: \"kubernetes.io/projected/d45646ee-dd08-4c26-bc08-df0abd618f15-kube-api-access-8xfnv\") pod \"ovn-controller-zcbz9-config-r94t4\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:42 crc kubenswrapper[4941]: I1130 07:04:42.615704 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:43 crc kubenswrapper[4941]: I1130 07:04:43.633814 4941 generic.go:334] "Generic (PLEG): container finished" podID="2c1461b3-8d1d-4812-a241-ad0a1a962c35" containerID="264d6282f2a198345fd9841d0e85c31aa2cff83866d86f1cfa4155e9d969338d" exitCode=0 Nov 30 07:04:43 crc kubenswrapper[4941]: I1130 07:04:43.633921 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-lknbl" event={"ID":"2c1461b3-8d1d-4812-a241-ad0a1a962c35","Type":"ContainerDied","Data":"264d6282f2a198345fd9841d0e85c31aa2cff83866d86f1cfa4155e9d969338d"} Nov 30 07:04:46 crc kubenswrapper[4941]: I1130 07:04:46.001677 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:46 crc kubenswrapper[4941]: I1130 07:04:46.014088 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift\") pod \"swift-storage-0\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " pod="openstack/swift-storage-0" Nov 30 07:04:46 crc kubenswrapper[4941]: I1130 07:04:46.267923 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" podUID="79cf65c4-a135-447d-b8ca-6c219b698395" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: i/o timeout" Nov 30 07:04:46 crc kubenswrapper[4941]: I1130 07:04:46.269108 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Nov 30 07:04:47 crc kubenswrapper[4941]: I1130 07:04:47.069569 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-zcbz9" podUID="e01076ff-d267-4931-8788-47eee9ebfd76" containerName="ovn-controller" probeResult="failure" output=< Nov 30 07:04:47 crc kubenswrapper[4941]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 30 07:04:47 crc kubenswrapper[4941]: > Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.046238 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.069735 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.170820 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-ovsdbserver-sb\") pod \"79cf65c4-a135-447d-b8ca-6c219b698395\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.170879 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-dns-svc\") pod \"79cf65c4-a135-447d-b8ca-6c219b698395\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.170917 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-swiftconf\") pod \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.170947 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2c1461b3-8d1d-4812-a241-ad0a1a962c35-scripts\") pod \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.171018 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2c1461b3-8d1d-4812-a241-ad0a1a962c35-etc-swift\") pod \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.171043 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-ovsdbserver-nb\") pod \"79cf65c4-a135-447d-b8ca-6c219b698395\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.171081 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9q27b\" (UniqueName: \"kubernetes.io/projected/2c1461b3-8d1d-4812-a241-ad0a1a962c35-kube-api-access-9q27b\") pod \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.171125 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2c1461b3-8d1d-4812-a241-ad0a1a962c35-ring-data-devices\") pod \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.171158 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-combined-ca-bundle\") pod \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.171178 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-config\") pod \"79cf65c4-a135-447d-b8ca-6c219b698395\" (UID: 
\"79cf65c4-a135-447d-b8ca-6c219b698395\") " Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.171209 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgcr9\" (UniqueName: \"kubernetes.io/projected/79cf65c4-a135-447d-b8ca-6c219b698395-kube-api-access-cgcr9\") pod \"79cf65c4-a135-447d-b8ca-6c219b698395\" (UID: \"79cf65c4-a135-447d-b8ca-6c219b698395\") " Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.171248 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-dispersionconf\") pod \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\" (UID: \"2c1461b3-8d1d-4812-a241-ad0a1a962c35\") " Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.174559 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c1461b3-8d1d-4812-a241-ad0a1a962c35-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "2c1461b3-8d1d-4812-a241-ad0a1a962c35" (UID: "2c1461b3-8d1d-4812-a241-ad0a1a962c35"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.177318 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c1461b3-8d1d-4812-a241-ad0a1a962c35-kube-api-access-9q27b" (OuterVolumeSpecName: "kube-api-access-9q27b") pod "2c1461b3-8d1d-4812-a241-ad0a1a962c35" (UID: "2c1461b3-8d1d-4812-a241-ad0a1a962c35"). InnerVolumeSpecName "kube-api-access-9q27b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.178274 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c1461b3-8d1d-4812-a241-ad0a1a962c35-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2c1461b3-8d1d-4812-a241-ad0a1a962c35" (UID: "2c1461b3-8d1d-4812-a241-ad0a1a962c35"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.186795 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79cf65c4-a135-447d-b8ca-6c219b698395-kube-api-access-cgcr9" (OuterVolumeSpecName: "kube-api-access-cgcr9") pod "79cf65c4-a135-447d-b8ca-6c219b698395" (UID: "79cf65c4-a135-447d-b8ca-6c219b698395"). InnerVolumeSpecName "kube-api-access-cgcr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.187154 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "2c1461b3-8d1d-4812-a241-ad0a1a962c35" (UID: "2c1461b3-8d1d-4812-a241-ad0a1a962c35"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.213002 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "2c1461b3-8d1d-4812-a241-ad0a1a962c35" (UID: "2c1461b3-8d1d-4812-a241-ad0a1a962c35"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.230094 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "79cf65c4-a135-447d-b8ca-6c219b698395" (UID: "79cf65c4-a135-447d-b8ca-6c219b698395"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.240718 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c1461b3-8d1d-4812-a241-ad0a1a962c35" (UID: "2c1461b3-8d1d-4812-a241-ad0a1a962c35"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.254785 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-config" (OuterVolumeSpecName: "config") pod "79cf65c4-a135-447d-b8ca-6c219b698395" (UID: "79cf65c4-a135-447d-b8ca-6c219b698395"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.255175 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c1461b3-8d1d-4812-a241-ad0a1a962c35-scripts" (OuterVolumeSpecName: "scripts") pod "2c1461b3-8d1d-4812-a241-ad0a1a962c35" (UID: "2c1461b3-8d1d-4812-a241-ad0a1a962c35"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.274052 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "79cf65c4-a135-447d-b8ca-6c219b698395" (UID: "79cf65c4-a135-447d-b8ca-6c219b698395"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.276625 4941 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2c1461b3-8d1d-4812-a241-ad0a1a962c35-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.276654 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.276665 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9q27b\" (UniqueName: \"kubernetes.io/projected/2c1461b3-8d1d-4812-a241-ad0a1a962c35-kube-api-access-9q27b\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.276677 4941 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2c1461b3-8d1d-4812-a241-ad0a1a962c35-ring-data-devices\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.276685 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.276697 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.276705 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgcr9\" (UniqueName: \"kubernetes.io/projected/79cf65c4-a135-447d-b8ca-6c219b698395-kube-api-access-cgcr9\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.276714 4941 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-dispersionconf\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.276722 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.276731 4941 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2c1461b3-8d1d-4812-a241-ad0a1a962c35-swiftconf\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.276739 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2c1461b3-8d1d-4812-a241-ad0a1a962c35-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.282804 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "79cf65c4-a135-447d-b8ca-6c219b698395" (UID: "79cf65c4-a135-447d-b8ca-6c219b698395"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.378568 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/79cf65c4-a135-447d-b8ca-6c219b698395-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.423104 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zcbz9-config-r94t4"] Nov 30 07:04:49 crc kubenswrapper[4941]: W1130 07:04:49.429142 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd45646ee_dd08_4c26_bc08_df0abd618f15.slice/crio-4b3673c5196335004f4ae7711292fa6d29dc4378bf106330daef0dd69d46bebc WatchSource:0}: Error finding container 4b3673c5196335004f4ae7711292fa6d29dc4378bf106330daef0dd69d46bebc: Status 404 returned error can't find the container with id 4b3673c5196335004f4ae7711292fa6d29dc4378bf106330daef0dd69d46bebc Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.626986 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.690609 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zcbz9-config-r94t4" event={"ID":"d45646ee-dd08-4c26-bc08-df0abd618f15","Type":"ContainerStarted","Data":"4b3673c5196335004f4ae7711292fa6d29dc4378bf106330daef0dd69d46bebc"} Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.692698 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" event={"ID":"79cf65c4-a135-447d-b8ca-6c219b698395","Type":"ContainerDied","Data":"e907d3da9e67bf2bf6db3e3d0dd5f10ea797cd49229adea15a3ebfddaaab1574"} Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.692750 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.692779 4941 scope.go:117] "RemoveContainer" containerID="f0e9148b7ea9aceb67086d995581b0c78637918d8931c2b43b089a091826c1ec" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.695563 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-lknbl" event={"ID":"2c1461b3-8d1d-4812-a241-ad0a1a962c35","Type":"ContainerDied","Data":"9af43fdb558e599eec9ac92538616ee00f993aec7ff25db2150ff16a47abeb2f"} Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.695627 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9af43fdb558e599eec9ac92538616ee00f993aec7ff25db2150ff16a47abeb2f" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.695624 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-lknbl" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.698971 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"528e1c3a07f331f4482cc697dc5f8de6c640ce9f0544cb8518a4ad1d933d98e4"} Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.719505 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bd7c66845-d7tlx"] Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.726131 4941 scope.go:117] "RemoveContainer" containerID="1db871b8a00f83aec555cbdd8a63670b3bf4a51f652a0f30641ce6a0ae860880" Nov 30 07:04:49 crc kubenswrapper[4941]: I1130 07:04:49.728984 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bd7c66845-d7tlx"] Nov 30 07:04:50 crc kubenswrapper[4941]: I1130 07:04:50.740853 4941 generic.go:334] "Generic (PLEG): container finished" podID="d45646ee-dd08-4c26-bc08-df0abd618f15" containerID="913c25abf91aa4ed662ae2fe67e807f82a53a365f35a622ebcc77a8a04941143" exitCode=0 Nov 30 07:04:50 crc kubenswrapper[4941]: I1130 07:04:50.741039 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zcbz9-config-r94t4" event={"ID":"d45646ee-dd08-4c26-bc08-df0abd618f15","Type":"ContainerDied","Data":"913c25abf91aa4ed662ae2fe67e807f82a53a365f35a622ebcc77a8a04941143"} Nov 30 07:04:50 crc kubenswrapper[4941]: I1130 07:04:50.747492 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rskk6" event={"ID":"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1","Type":"ContainerStarted","Data":"b0c2c7fa1005f08c39968a1a685ca3212988968efd0e99b914c94cc6cf534f58"} Nov 30 07:04:50 crc kubenswrapper[4941]: I1130 07:04:50.788746 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-rskk6" podStartSLOduration=5.676684172 podStartE2EDuration="18.788724132s" podCreationTimestamp="2025-11-30 07:04:32 +0000 UTC" firstStartedPulling="2025-11-30 07:04:35.788732652 +0000 UTC m=+1096.556904301" lastFinishedPulling="2025-11-30 07:04:48.900772632 +0000 UTC m=+1109.668944261" observedRunningTime="2025-11-30 07:04:50.783235321 +0000 UTC m=+1111.551407000" watchObservedRunningTime="2025-11-30 07:04:50.788724132 +0000 UTC m=+1111.556895741" Nov 30 07:04:51 crc kubenswrapper[4941]: I1130 07:04:51.268805 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5bd7c66845-d7tlx" podUID="79cf65c4-a135-447d-b8ca-6c219b698395" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: i/o timeout" Nov 30 07:04:51 crc kubenswrapper[4941]: I1130 07:04:51.535903 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79cf65c4-a135-447d-b8ca-6c219b698395" path="/var/lib/kubelet/pods/79cf65c4-a135-447d-b8ca-6c219b698395/volumes" Nov 30 07:04:51 crc kubenswrapper[4941]: I1130 07:04:51.757662 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897"} Nov 30 07:04:51 crc kubenswrapper[4941]: I1130 07:04:51.759253 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d"} Nov 30 
07:04:51 crc kubenswrapper[4941]: I1130 07:04:51.759390 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893"} Nov 30 07:04:51 crc kubenswrapper[4941]: I1130 07:04:51.759493 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5"} Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.021043 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-zcbz9" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.144568 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.240831 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-run\") pod \"d45646ee-dd08-4c26-bc08-df0abd618f15\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.240931 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-log-ovn\") pod \"d45646ee-dd08-4c26-bc08-df0abd618f15\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.240972 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d45646ee-dd08-4c26-bc08-df0abd618f15-additional-scripts\") pod \"d45646ee-dd08-4c26-bc08-df0abd618f15\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.240990 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-run" (OuterVolumeSpecName: "var-run") pod "d45646ee-dd08-4c26-bc08-df0abd618f15" (UID: "d45646ee-dd08-4c26-bc08-df0abd618f15"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.241046 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "d45646ee-dd08-4c26-bc08-df0abd618f15" (UID: "d45646ee-dd08-4c26-bc08-df0abd618f15"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.241064 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-run-ovn\") pod \"d45646ee-dd08-4c26-bc08-df0abd618f15\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.241120 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d45646ee-dd08-4c26-bc08-df0abd618f15-scripts\") pod \"d45646ee-dd08-4c26-bc08-df0abd618f15\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.241184 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xfnv\" (UniqueName: \"kubernetes.io/projected/d45646ee-dd08-4c26-bc08-df0abd618f15-kube-api-access-8xfnv\") pod \"d45646ee-dd08-4c26-bc08-df0abd618f15\" (UID: \"d45646ee-dd08-4c26-bc08-df0abd618f15\") " Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.241606 4941 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-run\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.241629 4941 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.242022 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "d45646ee-dd08-4c26-bc08-df0abd618f15" (UID: "d45646ee-dd08-4c26-bc08-df0abd618f15"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.242101 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d45646ee-dd08-4c26-bc08-df0abd618f15-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "d45646ee-dd08-4c26-bc08-df0abd618f15" (UID: "d45646ee-dd08-4c26-bc08-df0abd618f15"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.242860 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d45646ee-dd08-4c26-bc08-df0abd618f15-scripts" (OuterVolumeSpecName: "scripts") pod "d45646ee-dd08-4c26-bc08-df0abd618f15" (UID: "d45646ee-dd08-4c26-bc08-df0abd618f15"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.255757 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d45646ee-dd08-4c26-bc08-df0abd618f15-kube-api-access-8xfnv" (OuterVolumeSpecName: "kube-api-access-8xfnv") pod "d45646ee-dd08-4c26-bc08-df0abd618f15" (UID: "d45646ee-dd08-4c26-bc08-df0abd618f15"). InnerVolumeSpecName "kube-api-access-8xfnv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.343971 4941 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d45646ee-dd08-4c26-bc08-df0abd618f15-additional-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.344024 4941 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d45646ee-dd08-4c26-bc08-df0abd618f15-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.344036 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d45646ee-dd08-4c26-bc08-df0abd618f15-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.344045 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xfnv\" (UniqueName: \"kubernetes.io/projected/d45646ee-dd08-4c26-bc08-df0abd618f15-kube-api-access-8xfnv\") on node \"crc\" DevicePath \"\"" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.770315 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zcbz9-config-r94t4" event={"ID":"d45646ee-dd08-4c26-bc08-df0abd618f15","Type":"ContainerDied","Data":"4b3673c5196335004f4ae7711292fa6d29dc4378bf106330daef0dd69d46bebc"} Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.770376 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b3673c5196335004f4ae7711292fa6d29dc4378bf106330daef0dd69d46bebc" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.770461 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-zcbz9-config-r94t4" Nov 30 07:04:52 crc kubenswrapper[4941]: I1130 07:04:52.878221 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Nov 30 07:04:53 crc kubenswrapper[4941]: I1130 07:04:53.268211 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-zcbz9-config-r94t4"] Nov 30 07:04:53 crc kubenswrapper[4941]: I1130 07:04:53.282956 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-zcbz9-config-r94t4"] Nov 30 07:04:53 crc kubenswrapper[4941]: I1130 07:04:53.533359 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d45646ee-dd08-4c26-bc08-df0abd618f15" path="/var/lib/kubelet/pods/d45646ee-dd08-4c26-bc08-df0abd618f15/volumes" Nov 30 07:04:53 crc kubenswrapper[4941]: I1130 07:04:53.811554 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.646017 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-2hzwv"] Nov 30 07:04:54 crc kubenswrapper[4941]: E1130 07:04:54.646695 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79cf65c4-a135-447d-b8ca-6c219b698395" containerName="init" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.646721 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="79cf65c4-a135-447d-b8ca-6c219b698395" containerName="init" Nov 30 07:04:54 crc kubenswrapper[4941]: E1130 07:04:54.646751 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d45646ee-dd08-4c26-bc08-df0abd618f15" containerName="ovn-config" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.646761 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d45646ee-dd08-4c26-bc08-df0abd618f15" containerName="ovn-config" Nov 30 07:04:54 crc kubenswrapper[4941]: E1130 07:04:54.646771 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79cf65c4-a135-447d-b8ca-6c219b698395" containerName="dnsmasq-dns" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.646778 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="79cf65c4-a135-447d-b8ca-6c219b698395" containerName="dnsmasq-dns" Nov 30 07:04:54 crc kubenswrapper[4941]: E1130 07:04:54.646790 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c1461b3-8d1d-4812-a241-ad0a1a962c35" containerName="swift-ring-rebalance" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.646798 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c1461b3-8d1d-4812-a241-ad0a1a962c35" containerName="swift-ring-rebalance" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.647041 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d45646ee-dd08-4c26-bc08-df0abd618f15" containerName="ovn-config" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.647067 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c1461b3-8d1d-4812-a241-ad0a1a962c35" containerName="swift-ring-rebalance" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.647081 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="79cf65c4-a135-447d-b8ca-6c219b698395" containerName="dnsmasq-dns" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.647887 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-2hzwv" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.655442 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-75ae-account-create-update-fl25n"] Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.656940 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-75ae-account-create-update-fl25n" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.659555 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.669213 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-2hzwv"] Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.682096 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-75ae-account-create-update-fl25n"] Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.707233 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3-operator-scripts\") pod \"barbican-db-create-2hzwv\" (UID: \"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3\") " pod="openstack/barbican-db-create-2hzwv" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.707457 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3664bd8-d8a8-4d42-b983-ee81649d4db2-operator-scripts\") pod \"barbican-75ae-account-create-update-fl25n\" (UID: \"f3664bd8-d8a8-4d42-b983-ee81649d4db2\") " pod="openstack/barbican-75ae-account-create-update-fl25n" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.707526 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-td6dt\" (UniqueName: \"kubernetes.io/projected/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3-kube-api-access-td6dt\") pod \"barbican-db-create-2hzwv\" (UID: \"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3\") " pod="openstack/barbican-db-create-2hzwv" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.707785 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w46v8\" (UniqueName: \"kubernetes.io/projected/f3664bd8-d8a8-4d42-b983-ee81649d4db2-kube-api-access-w46v8\") pod \"barbican-75ae-account-create-update-fl25n\" (UID: \"f3664bd8-d8a8-4d42-b983-ee81649d4db2\") " pod="openstack/barbican-75ae-account-create-update-fl25n" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.738735 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-29mwc"] Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.739773 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-29mwc" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.769669 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-29mwc"] Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.809458 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3664bd8-d8a8-4d42-b983-ee81649d4db2-operator-scripts\") pod \"barbican-75ae-account-create-update-fl25n\" (UID: \"f3664bd8-d8a8-4d42-b983-ee81649d4db2\") " pod="openstack/barbican-75ae-account-create-update-fl25n" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.809512 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-td6dt\" (UniqueName: \"kubernetes.io/projected/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3-kube-api-access-td6dt\") pod \"barbican-db-create-2hzwv\" (UID: \"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3\") " pod="openstack/barbican-db-create-2hzwv" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.809537 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4fqg\" (UniqueName: \"kubernetes.io/projected/1f4d49ca-a8ee-4cd9-acd3-349c3c616627-kube-api-access-q4fqg\") pod \"cinder-db-create-29mwc\" (UID: \"1f4d49ca-a8ee-4cd9-acd3-349c3c616627\") " pod="openstack/cinder-db-create-29mwc" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.809567 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w46v8\" (UniqueName: \"kubernetes.io/projected/f3664bd8-d8a8-4d42-b983-ee81649d4db2-kube-api-access-w46v8\") pod \"barbican-75ae-account-create-update-fl25n\" (UID: \"f3664bd8-d8a8-4d42-b983-ee81649d4db2\") " pod="openstack/barbican-75ae-account-create-update-fl25n" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.809619 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f4d49ca-a8ee-4cd9-acd3-349c3c616627-operator-scripts\") pod \"cinder-db-create-29mwc\" (UID: \"1f4d49ca-a8ee-4cd9-acd3-349c3c616627\") " pod="openstack/cinder-db-create-29mwc" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.809653 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3-operator-scripts\") pod \"barbican-db-create-2hzwv\" (UID: \"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3\") " pod="openstack/barbican-db-create-2hzwv" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.810466 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3-operator-scripts\") pod \"barbican-db-create-2hzwv\" (UID: \"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3\") " pod="openstack/barbican-db-create-2hzwv" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.810507 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3664bd8-d8a8-4d42-b983-ee81649d4db2-operator-scripts\") pod \"barbican-75ae-account-create-update-fl25n\" (UID: \"f3664bd8-d8a8-4d42-b983-ee81649d4db2\") " pod="openstack/barbican-75ae-account-create-update-fl25n" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.841171 4941 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/cinder-43da-account-create-update-9jzkv"] Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.849011 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-43da-account-create-update-9jzkv" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.865784 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-43da-account-create-update-9jzkv"] Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.876648 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.879683 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w46v8\" (UniqueName: \"kubernetes.io/projected/f3664bd8-d8a8-4d42-b983-ee81649d4db2-kube-api-access-w46v8\") pod \"barbican-75ae-account-create-update-fl25n\" (UID: \"f3664bd8-d8a8-4d42-b983-ee81649d4db2\") " pod="openstack/barbican-75ae-account-create-update-fl25n" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.880237 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-td6dt\" (UniqueName: \"kubernetes.io/projected/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3-kube-api-access-td6dt\") pod \"barbican-db-create-2hzwv\" (UID: \"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3\") " pod="openstack/barbican-db-create-2hzwv" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.912490 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f4d49ca-a8ee-4cd9-acd3-349c3c616627-operator-scripts\") pod \"cinder-db-create-29mwc\" (UID: \"1f4d49ca-a8ee-4cd9-acd3-349c3c616627\") " pod="openstack/cinder-db-create-29mwc" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.913112 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hhsb\" (UniqueName: \"kubernetes.io/projected/0851a1f1-5a9f-4609-b638-33da1fca0f01-kube-api-access-2hhsb\") pod \"cinder-43da-account-create-update-9jzkv\" (UID: \"0851a1f1-5a9f-4609-b638-33da1fca0f01\") " pod="openstack/cinder-43da-account-create-update-9jzkv" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.913199 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0851a1f1-5a9f-4609-b638-33da1fca0f01-operator-scripts\") pod \"cinder-43da-account-create-update-9jzkv\" (UID: \"0851a1f1-5a9f-4609-b638-33da1fca0f01\") " pod="openstack/cinder-43da-account-create-update-9jzkv" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.913228 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4fqg\" (UniqueName: \"kubernetes.io/projected/1f4d49ca-a8ee-4cd9-acd3-349c3c616627-kube-api-access-q4fqg\") pod \"cinder-db-create-29mwc\" (UID: \"1f4d49ca-a8ee-4cd9-acd3-349c3c616627\") " pod="openstack/cinder-db-create-29mwc" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.913585 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f4d49ca-a8ee-4cd9-acd3-349c3c616627-operator-scripts\") pod \"cinder-db-create-29mwc\" (UID: \"1f4d49ca-a8ee-4cd9-acd3-349c3c616627\") " pod="openstack/cinder-db-create-29mwc" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.961228 4941 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/neutron-db-create-xpsqj"] Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.969268 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4fqg\" (UniqueName: \"kubernetes.io/projected/1f4d49ca-a8ee-4cd9-acd3-349c3c616627-kube-api-access-q4fqg\") pod \"cinder-db-create-29mwc\" (UID: \"1f4d49ca-a8ee-4cd9-acd3-349c3c616627\") " pod="openstack/cinder-db-create-29mwc" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.971633 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-2hzwv" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.975634 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-xpsqj" Nov 30 07:04:54 crc kubenswrapper[4941]: I1130 07:04:54.988612 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-75ae-account-create-update-fl25n" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.001952 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-xpsqj"] Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.018370 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dee8aa58-2a9c-4597-9d06-629fa6e37648-operator-scripts\") pod \"neutron-db-create-xpsqj\" (UID: \"dee8aa58-2a9c-4597-9d06-629fa6e37648\") " pod="openstack/neutron-db-create-xpsqj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.018518 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hhsb\" (UniqueName: \"kubernetes.io/projected/0851a1f1-5a9f-4609-b638-33da1fca0f01-kube-api-access-2hhsb\") pod \"cinder-43da-account-create-update-9jzkv\" (UID: \"0851a1f1-5a9f-4609-b638-33da1fca0f01\") " pod="openstack/cinder-43da-account-create-update-9jzkv" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.018637 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0851a1f1-5a9f-4609-b638-33da1fca0f01-operator-scripts\") pod \"cinder-43da-account-create-update-9jzkv\" (UID: \"0851a1f1-5a9f-4609-b638-33da1fca0f01\") " pod="openstack/cinder-43da-account-create-update-9jzkv" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.018752 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gr84f\" (UniqueName: \"kubernetes.io/projected/dee8aa58-2a9c-4597-9d06-629fa6e37648-kube-api-access-gr84f\") pod \"neutron-db-create-xpsqj\" (UID: \"dee8aa58-2a9c-4597-9d06-629fa6e37648\") " pod="openstack/neutron-db-create-xpsqj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.019745 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0851a1f1-5a9f-4609-b638-33da1fca0f01-operator-scripts\") pod \"cinder-43da-account-create-update-9jzkv\" (UID: \"0851a1f1-5a9f-4609-b638-33da1fca0f01\") " pod="openstack/cinder-43da-account-create-update-9jzkv" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.060665 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-29mwc" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.068302 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hhsb\" (UniqueName: \"kubernetes.io/projected/0851a1f1-5a9f-4609-b638-33da1fca0f01-kube-api-access-2hhsb\") pod \"cinder-43da-account-create-update-9jzkv\" (UID: \"0851a1f1-5a9f-4609-b638-33da1fca0f01\") " pod="openstack/cinder-43da-account-create-update-9jzkv" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.083241 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-prvmj"] Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.084694 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-prvmj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.093024 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.093181 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.093285 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.093506 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ds5bd" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.115822 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-a2cc-account-create-update-qdnx2"] Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.117373 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a2cc-account-create-update-qdnx2" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.121110 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8pgw\" (UniqueName: \"kubernetes.io/projected/d0ce952e-a290-4b54-b720-c34632cef479-kube-api-access-g8pgw\") pod \"keystone-db-sync-prvmj\" (UID: \"d0ce952e-a290-4b54-b720-c34632cef479\") " pod="openstack/keystone-db-sync-prvmj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.122990 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gr84f\" (UniqueName: \"kubernetes.io/projected/dee8aa58-2a9c-4597-9d06-629fa6e37648-kube-api-access-gr84f\") pod \"neutron-db-create-xpsqj\" (UID: \"dee8aa58-2a9c-4597-9d06-629fa6e37648\") " pod="openstack/neutron-db-create-xpsqj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.123057 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0ce952e-a290-4b54-b720-c34632cef479-config-data\") pod \"keystone-db-sync-prvmj\" (UID: \"d0ce952e-a290-4b54-b720-c34632cef479\") " pod="openstack/keystone-db-sync-prvmj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.123227 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0ce952e-a290-4b54-b720-c34632cef479-combined-ca-bundle\") pod \"keystone-db-sync-prvmj\" (UID: \"d0ce952e-a290-4b54-b720-c34632cef479\") " pod="openstack/keystone-db-sync-prvmj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.123270 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dee8aa58-2a9c-4597-9d06-629fa6e37648-operator-scripts\") pod \"neutron-db-create-xpsqj\" (UID: \"dee8aa58-2a9c-4597-9d06-629fa6e37648\") " pod="openstack/neutron-db-create-xpsqj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.127354 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dee8aa58-2a9c-4597-9d06-629fa6e37648-operator-scripts\") pod \"neutron-db-create-xpsqj\" (UID: \"dee8aa58-2a9c-4597-9d06-629fa6e37648\") " pod="openstack/neutron-db-create-xpsqj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.127930 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.143000 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-prvmj"] Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.161594 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-a2cc-account-create-update-qdnx2"] Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.163403 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gr84f\" (UniqueName: \"kubernetes.io/projected/dee8aa58-2a9c-4597-9d06-629fa6e37648-kube-api-access-gr84f\") pod \"neutron-db-create-xpsqj\" (UID: \"dee8aa58-2a9c-4597-9d06-629fa6e37648\") " pod="openstack/neutron-db-create-xpsqj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.256170 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tx5g6\" (UniqueName: \"kubernetes.io/projected/eace6e2e-dba9-4430-9383-0cbcb862d675-kube-api-access-tx5g6\") pod \"neutron-a2cc-account-create-update-qdnx2\" (UID: \"eace6e2e-dba9-4430-9383-0cbcb862d675\") " pod="openstack/neutron-a2cc-account-create-update-qdnx2" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.256574 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eace6e2e-dba9-4430-9383-0cbcb862d675-operator-scripts\") pod \"neutron-a2cc-account-create-update-qdnx2\" (UID: \"eace6e2e-dba9-4430-9383-0cbcb862d675\") " pod="openstack/neutron-a2cc-account-create-update-qdnx2" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.256619 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8pgw\" (UniqueName: \"kubernetes.io/projected/d0ce952e-a290-4b54-b720-c34632cef479-kube-api-access-g8pgw\") pod \"keystone-db-sync-prvmj\" (UID: \"d0ce952e-a290-4b54-b720-c34632cef479\") " pod="openstack/keystone-db-sync-prvmj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.260040 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0ce952e-a290-4b54-b720-c34632cef479-config-data\") pod \"keystone-db-sync-prvmj\" (UID: \"d0ce952e-a290-4b54-b720-c34632cef479\") " pod="openstack/keystone-db-sync-prvmj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.260097 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0ce952e-a290-4b54-b720-c34632cef479-combined-ca-bundle\") pod \"keystone-db-sync-prvmj\" (UID: \"d0ce952e-a290-4b54-b720-c34632cef479\") " 
pod="openstack/keystone-db-sync-prvmj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.266783 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0ce952e-a290-4b54-b720-c34632cef479-config-data\") pod \"keystone-db-sync-prvmj\" (UID: \"d0ce952e-a290-4b54-b720-c34632cef479\") " pod="openstack/keystone-db-sync-prvmj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.266861 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0ce952e-a290-4b54-b720-c34632cef479-combined-ca-bundle\") pod \"keystone-db-sync-prvmj\" (UID: \"d0ce952e-a290-4b54-b720-c34632cef479\") " pod="openstack/keystone-db-sync-prvmj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.267203 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-43da-account-create-update-9jzkv" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.284335 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8pgw\" (UniqueName: \"kubernetes.io/projected/d0ce952e-a290-4b54-b720-c34632cef479-kube-api-access-g8pgw\") pod \"keystone-db-sync-prvmj\" (UID: \"d0ce952e-a290-4b54-b720-c34632cef479\") " pod="openstack/keystone-db-sync-prvmj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.361476 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tx5g6\" (UniqueName: \"kubernetes.io/projected/eace6e2e-dba9-4430-9383-0cbcb862d675-kube-api-access-tx5g6\") pod \"neutron-a2cc-account-create-update-qdnx2\" (UID: \"eace6e2e-dba9-4430-9383-0cbcb862d675\") " pod="openstack/neutron-a2cc-account-create-update-qdnx2" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.361529 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eace6e2e-dba9-4430-9383-0cbcb862d675-operator-scripts\") pod \"neutron-a2cc-account-create-update-qdnx2\" (UID: \"eace6e2e-dba9-4430-9383-0cbcb862d675\") " pod="openstack/neutron-a2cc-account-create-update-qdnx2" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.362244 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eace6e2e-dba9-4430-9383-0cbcb862d675-operator-scripts\") pod \"neutron-a2cc-account-create-update-qdnx2\" (UID: \"eace6e2e-dba9-4430-9383-0cbcb862d675\") " pod="openstack/neutron-a2cc-account-create-update-qdnx2" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.387270 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tx5g6\" (UniqueName: \"kubernetes.io/projected/eace6e2e-dba9-4430-9383-0cbcb862d675-kube-api-access-tx5g6\") pod \"neutron-a2cc-account-create-update-qdnx2\" (UID: \"eace6e2e-dba9-4430-9383-0cbcb862d675\") " pod="openstack/neutron-a2cc-account-create-update-qdnx2" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.408855 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-xpsqj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.428435 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-prvmj" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.464845 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-a2cc-account-create-update-qdnx2" Nov 30 07:04:55 crc kubenswrapper[4941]: I1130 07:04:55.822524 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-2hzwv"] Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.125402 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-xpsqj"] Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.134560 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-29mwc"] Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.143667 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-prvmj"] Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.152635 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-43da-account-create-update-9jzkv"] Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.161678 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-75ae-account-create-update-fl25n"] Nov 30 07:04:56 crc kubenswrapper[4941]: W1130 07:04:56.216176 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3664bd8_d8a8_4d42_b983_ee81649d4db2.slice/crio-500c1d8a7c09088f77dde9539dad3a981d023867ab6b462a9ad768e9e8f1b575 WatchSource:0}: Error finding container 500c1d8a7c09088f77dde9539dad3a981d023867ab6b462a9ad768e9e8f1b575: Status 404 returned error can't find the container with id 500c1d8a7c09088f77dde9539dad3a981d023867ab6b462a9ad768e9e8f1b575 Nov 30 07:04:56 crc kubenswrapper[4941]: W1130 07:04:56.219796 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f4d49ca_a8ee_4cd9_acd3_349c3c616627.slice/crio-763fcbe082eb64b18ff9ab6186329639a753a6426f28dd5e9650710b1b628061 WatchSource:0}: Error finding container 763fcbe082eb64b18ff9ab6186329639a753a6426f28dd5e9650710b1b628061: Status 404 returned error can't find the container with id 763fcbe082eb64b18ff9ab6186329639a753a6426f28dd5e9650710b1b628061 Nov 30 07:04:56 crc kubenswrapper[4941]: W1130 07:04:56.220650 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddee8aa58_2a9c_4597_9d06_629fa6e37648.slice/crio-e3bd22c69a57a3d4a8d1b9504131ebb43821a05c0005350681180aee8f6d4ebc WatchSource:0}: Error finding container e3bd22c69a57a3d4a8d1b9504131ebb43821a05c0005350681180aee8f6d4ebc: Status 404 returned error can't find the container with id e3bd22c69a57a3d4a8d1b9504131ebb43821a05c0005350681180aee8f6d4ebc Nov 30 07:04:56 crc kubenswrapper[4941]: W1130 07:04:56.234996 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded5cfc02_cb0d_4b4c_a239_40f23890dbd3.slice/crio-c8d103ff76a0229bb67aa67c39fcbba10826400679d2f2602dde84f98eace153 WatchSource:0}: Error finding container c8d103ff76a0229bb67aa67c39fcbba10826400679d2f2602dde84f98eace153: Status 404 returned error can't find the container with id c8d103ff76a0229bb67aa67c39fcbba10826400679d2f2602dde84f98eace153 Nov 30 07:04:56 crc kubenswrapper[4941]: W1130 07:04:56.236495 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0ce952e_a290_4b54_b720_c34632cef479.slice/crio-ef9c1fb348173ba6a8b157e184931dbba046da4de4d9237fc38adae619d58c4b 
WatchSource:0}: Error finding container ef9c1fb348173ba6a8b157e184931dbba046da4de4d9237fc38adae619d58c4b: Status 404 returned error can't find the container with id ef9c1fb348173ba6a8b157e184931dbba046da4de4d9237fc38adae619d58c4b Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.304464 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-a2cc-account-create-update-qdnx2"] Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.817018 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-2hzwv" event={"ID":"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3","Type":"ContainerStarted","Data":"a9f3b9a4af1954fccf875c6a505fd0e6cd606fbf36e441a102a4d4d71fd40774"} Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.817453 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-2hzwv" event={"ID":"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3","Type":"ContainerStarted","Data":"c8d103ff76a0229bb67aa67c39fcbba10826400679d2f2602dde84f98eace153"} Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.820230 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-prvmj" event={"ID":"d0ce952e-a290-4b54-b720-c34632cef479","Type":"ContainerStarted","Data":"ef9c1fb348173ba6a8b157e184931dbba046da4de4d9237fc38adae619d58c4b"} Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.825209 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-75ae-account-create-update-fl25n" event={"ID":"f3664bd8-d8a8-4d42-b983-ee81649d4db2","Type":"ContainerStarted","Data":"2fbb2c22c3103ac186df3be80937e50d5dbe7ab19422754df8bed50cf8d400b3"} Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.825250 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-75ae-account-create-update-fl25n" event={"ID":"f3664bd8-d8a8-4d42-b983-ee81649d4db2","Type":"ContainerStarted","Data":"500c1d8a7c09088f77dde9539dad3a981d023867ab6b462a9ad768e9e8f1b575"} Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.827151 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-43da-account-create-update-9jzkv" event={"ID":"0851a1f1-5a9f-4609-b638-33da1fca0f01","Type":"ContainerStarted","Data":"698a808256a2194b65e6223885ab5d564e66738858d71ad22d9e0b5a59262e12"} Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.827227 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-43da-account-create-update-9jzkv" event={"ID":"0851a1f1-5a9f-4609-b638-33da1fca0f01","Type":"ContainerStarted","Data":"822d4f31d5326d9de81501f75dd9933263865f667d4e4ada4415a8f70a8759c4"} Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.829141 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a2cc-account-create-update-qdnx2" event={"ID":"eace6e2e-dba9-4430-9383-0cbcb862d675","Type":"ContainerStarted","Data":"899456bfa6431f142919e5f8d16f0caeb94ab9bc705c9185b958c225ad7fdbcd"} Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.829196 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a2cc-account-create-update-qdnx2" event={"ID":"eace6e2e-dba9-4430-9383-0cbcb862d675","Type":"ContainerStarted","Data":"ea1df231dfa4333d4ca4286d0924effeae526c3cc14fd1e68061828401836717"} Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.832502 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-29mwc" 
event={"ID":"1f4d49ca-a8ee-4cd9-acd3-349c3c616627","Type":"ContainerStarted","Data":"70c42b65522d407746ecc1913faef76bac0918ec07d98dfe0a1642f8dcf3e157"} Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.832542 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-29mwc" event={"ID":"1f4d49ca-a8ee-4cd9-acd3-349c3c616627","Type":"ContainerStarted","Data":"763fcbe082eb64b18ff9ab6186329639a753a6426f28dd5e9650710b1b628061"} Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.838481 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-2hzwv" podStartSLOduration=2.8384596269999998 podStartE2EDuration="2.838459627s" podCreationTimestamp="2025-11-30 07:04:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:56.831363736 +0000 UTC m=+1117.599535345" watchObservedRunningTime="2025-11-30 07:04:56.838459627 +0000 UTC m=+1117.606631236" Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.843572 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-xpsqj" event={"ID":"dee8aa58-2a9c-4597-9d06-629fa6e37648","Type":"ContainerStarted","Data":"e3bd22c69a57a3d4a8d1b9504131ebb43821a05c0005350681180aee8f6d4ebc"} Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.883670 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-a2cc-account-create-update-qdnx2" podStartSLOduration=1.883648376 podStartE2EDuration="1.883648376s" podCreationTimestamp="2025-11-30 07:04:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:56.864905112 +0000 UTC m=+1117.633076721" watchObservedRunningTime="2025-11-30 07:04:56.883648376 +0000 UTC m=+1117.651819985" Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.909177 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-75ae-account-create-update-fl25n" podStartSLOduration=2.909154021 podStartE2EDuration="2.909154021s" podCreationTimestamp="2025-11-30 07:04:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:56.882314774 +0000 UTC m=+1117.650486393" watchObservedRunningTime="2025-11-30 07:04:56.909154021 +0000 UTC m=+1117.677325630" Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.933299 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-43da-account-create-update-9jzkv" podStartSLOduration=2.933280082 podStartE2EDuration="2.933280082s" podCreationTimestamp="2025-11-30 07:04:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:56.904802274 +0000 UTC m=+1117.672973873" watchObservedRunningTime="2025-11-30 07:04:56.933280082 +0000 UTC m=+1117.701451691" Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.942538 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-29mwc" podStartSLOduration=2.94251846 podStartE2EDuration="2.94251846s" podCreationTimestamp="2025-11-30 07:04:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:56.924012683 +0000 UTC 
m=+1117.692184292" watchObservedRunningTime="2025-11-30 07:04:56.94251846 +0000 UTC m=+1117.710690059" Nov 30 07:04:56 crc kubenswrapper[4941]: I1130 07:04:56.958118 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-xpsqj" podStartSLOduration=2.958091525 podStartE2EDuration="2.958091525s" podCreationTimestamp="2025-11-30 07:04:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:04:56.939906759 +0000 UTC m=+1117.708078368" watchObservedRunningTime="2025-11-30 07:04:56.958091525 +0000 UTC m=+1117.726263134" Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.860948 4941 generic.go:334] "Generic (PLEG): container finished" podID="dee8aa58-2a9c-4597-9d06-629fa6e37648" containerID="b1f933b51ff93fa7a1c8eb2e271d814e0c9d46c1eb93b1450edfc9e30a2294bd" exitCode=0 Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.861053 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-xpsqj" event={"ID":"dee8aa58-2a9c-4597-9d06-629fa6e37648","Type":"ContainerDied","Data":"b1f933b51ff93fa7a1c8eb2e271d814e0c9d46c1eb93b1450edfc9e30a2294bd"} Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.863013 4941 generic.go:334] "Generic (PLEG): container finished" podID="ed5cfc02-cb0d-4b4c-a239-40f23890dbd3" containerID="a9f3b9a4af1954fccf875c6a505fd0e6cd606fbf36e441a102a4d4d71fd40774" exitCode=0 Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.863075 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-2hzwv" event={"ID":"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3","Type":"ContainerDied","Data":"a9f3b9a4af1954fccf875c6a505fd0e6cd606fbf36e441a102a4d4d71fd40774"} Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.865372 4941 generic.go:334] "Generic (PLEG): container finished" podID="f3664bd8-d8a8-4d42-b983-ee81649d4db2" containerID="2fbb2c22c3103ac186df3be80937e50d5dbe7ab19422754df8bed50cf8d400b3" exitCode=0 Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.865419 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-75ae-account-create-update-fl25n" event={"ID":"f3664bd8-d8a8-4d42-b983-ee81649d4db2","Type":"ContainerDied","Data":"2fbb2c22c3103ac186df3be80937e50d5dbe7ab19422754df8bed50cf8d400b3"} Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.867527 4941 generic.go:334] "Generic (PLEG): container finished" podID="0851a1f1-5a9f-4609-b638-33da1fca0f01" containerID="698a808256a2194b65e6223885ab5d564e66738858d71ad22d9e0b5a59262e12" exitCode=0 Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.867609 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-43da-account-create-update-9jzkv" event={"ID":"0851a1f1-5a9f-4609-b638-33da1fca0f01","Type":"ContainerDied","Data":"698a808256a2194b65e6223885ab5d564e66738858d71ad22d9e0b5a59262e12"} Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.870069 4941 generic.go:334] "Generic (PLEG): container finished" podID="eace6e2e-dba9-4430-9383-0cbcb862d675" containerID="899456bfa6431f142919e5f8d16f0caeb94ab9bc705c9185b958c225ad7fdbcd" exitCode=0 Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.870139 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a2cc-account-create-update-qdnx2" event={"ID":"eace6e2e-dba9-4430-9383-0cbcb862d675","Type":"ContainerDied","Data":"899456bfa6431f142919e5f8d16f0caeb94ab9bc705c9185b958c225ad7fdbcd"} Nov 30 07:04:57 crc 
kubenswrapper[4941]: I1130 07:04:57.871751 4941 generic.go:334] "Generic (PLEG): container finished" podID="1f4d49ca-a8ee-4cd9-acd3-349c3c616627" containerID="70c42b65522d407746ecc1913faef76bac0918ec07d98dfe0a1642f8dcf3e157" exitCode=0 Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.871811 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-29mwc" event={"ID":"1f4d49ca-a8ee-4cd9-acd3-349c3c616627","Type":"ContainerDied","Data":"70c42b65522d407746ecc1913faef76bac0918ec07d98dfe0a1642f8dcf3e157"} Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.884222 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99"} Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.884266 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc"} Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.884279 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5"} Nov 30 07:04:57 crc kubenswrapper[4941]: I1130 07:04:57.884287 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2"} Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.742046 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-2hzwv" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.754846 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a2cc-account-create-update-qdnx2" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.771555 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-xpsqj" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.796714 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-43da-account-create-update-9jzkv" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.807567 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-29mwc" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.819434 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-75ae-account-create-update-fl25n" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.820591 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hhsb\" (UniqueName: \"kubernetes.io/projected/0851a1f1-5a9f-4609-b638-33da1fca0f01-kube-api-access-2hhsb\") pod \"0851a1f1-5a9f-4609-b638-33da1fca0f01\" (UID: \"0851a1f1-5a9f-4609-b638-33da1fca0f01\") " Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.820706 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gr84f\" (UniqueName: \"kubernetes.io/projected/dee8aa58-2a9c-4597-9d06-629fa6e37648-kube-api-access-gr84f\") pod \"dee8aa58-2a9c-4597-9d06-629fa6e37648\" (UID: \"dee8aa58-2a9c-4597-9d06-629fa6e37648\") " Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.820962 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eace6e2e-dba9-4430-9383-0cbcb862d675-operator-scripts\") pod \"eace6e2e-dba9-4430-9383-0cbcb862d675\" (UID: \"eace6e2e-dba9-4430-9383-0cbcb862d675\") " Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.821013 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0851a1f1-5a9f-4609-b638-33da1fca0f01-operator-scripts\") pod \"0851a1f1-5a9f-4609-b638-33da1fca0f01\" (UID: \"0851a1f1-5a9f-4609-b638-33da1fca0f01\") " Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.821085 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f4d49ca-a8ee-4cd9-acd3-349c3c616627-operator-scripts\") pod \"1f4d49ca-a8ee-4cd9-acd3-349c3c616627\" (UID: \"1f4d49ca-a8ee-4cd9-acd3-349c3c616627\") " Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.821202 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4fqg\" (UniqueName: \"kubernetes.io/projected/1f4d49ca-a8ee-4cd9-acd3-349c3c616627-kube-api-access-q4fqg\") pod \"1f4d49ca-a8ee-4cd9-acd3-349c3c616627\" (UID: \"1f4d49ca-a8ee-4cd9-acd3-349c3c616627\") " Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.821260 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dee8aa58-2a9c-4597-9d06-629fa6e37648-operator-scripts\") pod \"dee8aa58-2a9c-4597-9d06-629fa6e37648\" (UID: \"dee8aa58-2a9c-4597-9d06-629fa6e37648\") " Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.821357 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-td6dt\" (UniqueName: \"kubernetes.io/projected/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3-kube-api-access-td6dt\") pod \"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3\" (UID: \"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3\") " Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.821423 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tx5g6\" (UniqueName: \"kubernetes.io/projected/eace6e2e-dba9-4430-9383-0cbcb862d675-kube-api-access-tx5g6\") pod \"eace6e2e-dba9-4430-9383-0cbcb862d675\" (UID: \"eace6e2e-dba9-4430-9383-0cbcb862d675\") " Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.821509 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3-operator-scripts\") pod \"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3\" (UID: \"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3\") " Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.822026 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f4d49ca-a8ee-4cd9-acd3-349c3c616627-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1f4d49ca-a8ee-4cd9-acd3-349c3c616627" (UID: "1f4d49ca-a8ee-4cd9-acd3-349c3c616627"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.822447 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eace6e2e-dba9-4430-9383-0cbcb862d675-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eace6e2e-dba9-4430-9383-0cbcb862d675" (UID: "eace6e2e-dba9-4430-9383-0cbcb862d675"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.822843 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f4d49ca-a8ee-4cd9-acd3-349c3c616627-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.822861 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dee8aa58-2a9c-4597-9d06-629fa6e37648-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dee8aa58-2a9c-4597-9d06-629fa6e37648" (UID: "dee8aa58-2a9c-4597-9d06-629fa6e37648"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.822875 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eace6e2e-dba9-4430-9383-0cbcb862d675-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.823380 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0851a1f1-5a9f-4609-b638-33da1fca0f01-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0851a1f1-5a9f-4609-b638-33da1fca0f01" (UID: "0851a1f1-5a9f-4609-b638-33da1fca0f01"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.824372 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ed5cfc02-cb0d-4b4c-a239-40f23890dbd3" (UID: "ed5cfc02-cb0d-4b4c-a239-40f23890dbd3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.825897 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0851a1f1-5a9f-4609-b638-33da1fca0f01-kube-api-access-2hhsb" (OuterVolumeSpecName: "kube-api-access-2hhsb") pod "0851a1f1-5a9f-4609-b638-33da1fca0f01" (UID: "0851a1f1-5a9f-4609-b638-33da1fca0f01"). InnerVolumeSpecName "kube-api-access-2hhsb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.828831 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3-kube-api-access-td6dt" (OuterVolumeSpecName: "kube-api-access-td6dt") pod "ed5cfc02-cb0d-4b4c-a239-40f23890dbd3" (UID: "ed5cfc02-cb0d-4b4c-a239-40f23890dbd3"). InnerVolumeSpecName "kube-api-access-td6dt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.829858 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eace6e2e-dba9-4430-9383-0cbcb862d675-kube-api-access-tx5g6" (OuterVolumeSpecName: "kube-api-access-tx5g6") pod "eace6e2e-dba9-4430-9383-0cbcb862d675" (UID: "eace6e2e-dba9-4430-9383-0cbcb862d675"). InnerVolumeSpecName "kube-api-access-tx5g6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.845457 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f4d49ca-a8ee-4cd9-acd3-349c3c616627-kube-api-access-q4fqg" (OuterVolumeSpecName: "kube-api-access-q4fqg") pod "1f4d49ca-a8ee-4cd9-acd3-349c3c616627" (UID: "1f4d49ca-a8ee-4cd9-acd3-349c3c616627"). InnerVolumeSpecName "kube-api-access-q4fqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.846130 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dee8aa58-2a9c-4597-9d06-629fa6e37648-kube-api-access-gr84f" (OuterVolumeSpecName: "kube-api-access-gr84f") pod "dee8aa58-2a9c-4597-9d06-629fa6e37648" (UID: "dee8aa58-2a9c-4597-9d06-629fa6e37648"). InnerVolumeSpecName "kube-api-access-gr84f". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.924557 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3664bd8-d8a8-4d42-b983-ee81649d4db2-operator-scripts\") pod \"f3664bd8-d8a8-4d42-b983-ee81649d4db2\" (UID: \"f3664bd8-d8a8-4d42-b983-ee81649d4db2\") " Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.924797 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w46v8\" (UniqueName: \"kubernetes.io/projected/f3664bd8-d8a8-4d42-b983-ee81649d4db2-kube-api-access-w46v8\") pod \"f3664bd8-d8a8-4d42-b983-ee81649d4db2\" (UID: \"f3664bd8-d8a8-4d42-b983-ee81649d4db2\") " Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.925138 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3664bd8-d8a8-4d42-b983-ee81649d4db2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f3664bd8-d8a8-4d42-b983-ee81649d4db2" (UID: "f3664bd8-d8a8-4d42-b983-ee81649d4db2"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.925649 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48"} Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.925673 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0851a1f1-5a9f-4609-b638-33da1fca0f01-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.925749 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4fqg\" (UniqueName: \"kubernetes.io/projected/1f4d49ca-a8ee-4cd9-acd3-349c3c616627-kube-api-access-q4fqg\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.925765 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dee8aa58-2a9c-4597-9d06-629fa6e37648-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.925776 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-td6dt\" (UniqueName: \"kubernetes.io/projected/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3-kube-api-access-td6dt\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.925790 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tx5g6\" (UniqueName: \"kubernetes.io/projected/eace6e2e-dba9-4430-9383-0cbcb862d675-kube-api-access-tx5g6\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.925800 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.925811 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hhsb\" (UniqueName: \"kubernetes.io/projected/0851a1f1-5a9f-4609-b638-33da1fca0f01-kube-api-access-2hhsb\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.925821 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gr84f\" (UniqueName: \"kubernetes.io/projected/dee8aa58-2a9c-4597-9d06-629fa6e37648-kube-api-access-gr84f\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.925831 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3664bd8-d8a8-4d42-b983-ee81649d4db2-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.928121 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3664bd8-d8a8-4d42-b983-ee81649d4db2-kube-api-access-w46v8" (OuterVolumeSpecName: "kube-api-access-w46v8") pod "f3664bd8-d8a8-4d42-b983-ee81649d4db2" (UID: "f3664bd8-d8a8-4d42-b983-ee81649d4db2"). InnerVolumeSpecName "kube-api-access-w46v8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.936804 4941 generic.go:334] "Generic (PLEG): container finished" podID="6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1" containerID="b0c2c7fa1005f08c39968a1a685ca3212988968efd0e99b914c94cc6cf534f58" exitCode=0 Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.936846 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rskk6" event={"ID":"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1","Type":"ContainerDied","Data":"b0c2c7fa1005f08c39968a1a685ca3212988968efd0e99b914c94cc6cf534f58"} Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.940511 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-prvmj" event={"ID":"d0ce952e-a290-4b54-b720-c34632cef479","Type":"ContainerStarted","Data":"a4e701153073d1d552b85184ac3a38256bd5f4d62dfd4daead28e74cf142a449"} Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.941897 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-29mwc" event={"ID":"1f4d49ca-a8ee-4cd9-acd3-349c3c616627","Type":"ContainerDied","Data":"763fcbe082eb64b18ff9ab6186329639a753a6426f28dd5e9650710b1b628061"} Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.941943 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="763fcbe082eb64b18ff9ab6186329639a753a6426f28dd5e9650710b1b628061" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.942021 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-29mwc" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.945714 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a2cc-account-create-update-qdnx2" event={"ID":"eace6e2e-dba9-4430-9383-0cbcb862d675","Type":"ContainerDied","Data":"ea1df231dfa4333d4ca4286d0924effeae526c3cc14fd1e68061828401836717"} Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.945742 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea1df231dfa4333d4ca4286d0924effeae526c3cc14fd1e68061828401836717" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.945903 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a2cc-account-create-update-qdnx2" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.956687 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-xpsqj" event={"ID":"dee8aa58-2a9c-4597-9d06-629fa6e37648","Type":"ContainerDied","Data":"e3bd22c69a57a3d4a8d1b9504131ebb43821a05c0005350681180aee8f6d4ebc"} Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.956743 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3bd22c69a57a3d4a8d1b9504131ebb43821a05c0005350681180aee8f6d4ebc" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.956818 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-xpsqj" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.968236 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-2hzwv" event={"ID":"ed5cfc02-cb0d-4b4c-a239-40f23890dbd3","Type":"ContainerDied","Data":"c8d103ff76a0229bb67aa67c39fcbba10826400679d2f2602dde84f98eace153"} Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.968271 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8d103ff76a0229bb67aa67c39fcbba10826400679d2f2602dde84f98eace153" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.968396 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-2hzwv" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.977784 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-75ae-account-create-update-fl25n" event={"ID":"f3664bd8-d8a8-4d42-b983-ee81649d4db2","Type":"ContainerDied","Data":"500c1d8a7c09088f77dde9539dad3a981d023867ab6b462a9ad768e9e8f1b575"} Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.977818 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="500c1d8a7c09088f77dde9539dad3a981d023867ab6b462a9ad768e9e8f1b575" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.977884 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-75ae-account-create-update-fl25n" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.979378 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-43da-account-create-update-9jzkv" event={"ID":"0851a1f1-5a9f-4609-b638-33da1fca0f01","Type":"ContainerDied","Data":"822d4f31d5326d9de81501f75dd9933263865f667d4e4ada4415a8f70a8759c4"} Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.979496 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="822d4f31d5326d9de81501f75dd9933263865f667d4e4ada4415a8f70a8759c4" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.979598 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-43da-account-create-update-9jzkv" Nov 30 07:05:01 crc kubenswrapper[4941]: I1130 07:05:01.983563 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-prvmj" podStartSLOduration=2.692827658 podStartE2EDuration="7.983538128s" podCreationTimestamp="2025-11-30 07:04:54 +0000 UTC" firstStartedPulling="2025-11-30 07:04:56.275891284 +0000 UTC m=+1117.044062893" lastFinishedPulling="2025-11-30 07:05:01.566601754 +0000 UTC m=+1122.334773363" observedRunningTime="2025-11-30 07:05:01.974583309 +0000 UTC m=+1122.742754918" watchObservedRunningTime="2025-11-30 07:05:01.983538128 +0000 UTC m=+1122.751709747" Nov 30 07:05:02 crc kubenswrapper[4941]: I1130 07:05:02.027160 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w46v8\" (UniqueName: \"kubernetes.io/projected/f3664bd8-d8a8-4d42-b983-ee81649d4db2-kube-api-access-w46v8\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.046406 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34"} Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.046790 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed"} Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.046804 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc"} Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.046813 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44"} Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.561772 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-rskk6" Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.669168 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-combined-ca-bundle\") pod \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.669768 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hh6x2\" (UniqueName: \"kubernetes.io/projected/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-kube-api-access-hh6x2\") pod \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.669848 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-config-data\") pod \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.669900 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-db-sync-config-data\") pod \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\" (UID: \"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1\") " Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.676713 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1" (UID: "6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.677398 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-kube-api-access-hh6x2" (OuterVolumeSpecName: "kube-api-access-hh6x2") pod "6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1" (UID: "6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1"). InnerVolumeSpecName "kube-api-access-hh6x2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.696175 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1" (UID: "6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.739511 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-config-data" (OuterVolumeSpecName: "config-data") pod "6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1" (UID: "6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.771597 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hh6x2\" (UniqueName: \"kubernetes.io/projected/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-kube-api-access-hh6x2\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.771627 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.771638 4941 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:03 crc kubenswrapper[4941]: I1130 07:05:03.771647 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.057814 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rskk6" event={"ID":"6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1","Type":"ContainerDied","Data":"6b2b75b8b910ab32e34ac7b551625344dcce2c572343ee43e69e6d98a36e0518"} Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.057854 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b2b75b8b910ab32e34ac7b551625344dcce2c572343ee43e69e6d98a36e0518" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.057912 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-rskk6" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.101166 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf"} Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.101221 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerStarted","Data":"70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb"} Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.519641 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=24.593340204 podStartE2EDuration="36.519621287s" podCreationTimestamp="2025-11-30 07:04:28 +0000 UTC" firstStartedPulling="2025-11-30 07:04:49.636795031 +0000 UTC m=+1110.404966640" lastFinishedPulling="2025-11-30 07:05:01.563076084 +0000 UTC m=+1122.331247723" observedRunningTime="2025-11-30 07:05:04.156586513 +0000 UTC m=+1124.924758132" watchObservedRunningTime="2025-11-30 07:05:04.519621287 +0000 UTC m=+1125.287792896" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.523835 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6df78bdcfc-xkvlc"] Nov 30 07:05:04 crc kubenswrapper[4941]: E1130 07:05:04.524237 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eace6e2e-dba9-4430-9383-0cbcb862d675" containerName="mariadb-account-create-update" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524259 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="eace6e2e-dba9-4430-9383-0cbcb862d675" containerName="mariadb-account-create-update" Nov 30 07:05:04 crc kubenswrapper[4941]: E1130 07:05:04.524275 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3664bd8-d8a8-4d42-b983-ee81649d4db2" containerName="mariadb-account-create-update" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524284 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3664bd8-d8a8-4d42-b983-ee81649d4db2" containerName="mariadb-account-create-update" Nov 30 07:05:04 crc kubenswrapper[4941]: E1130 07:05:04.524315 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0851a1f1-5a9f-4609-b638-33da1fca0f01" containerName="mariadb-account-create-update" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524340 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="0851a1f1-5a9f-4609-b638-33da1fca0f01" containerName="mariadb-account-create-update" Nov 30 07:05:04 crc kubenswrapper[4941]: E1130 07:05:04.524353 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dee8aa58-2a9c-4597-9d06-629fa6e37648" containerName="mariadb-database-create" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524361 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="dee8aa58-2a9c-4597-9d06-629fa6e37648" containerName="mariadb-database-create" Nov 30 07:05:04 crc kubenswrapper[4941]: E1130 07:05:04.524375 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f4d49ca-a8ee-4cd9-acd3-349c3c616627" containerName="mariadb-database-create" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524384 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f4d49ca-a8ee-4cd9-acd3-349c3c616627" containerName="mariadb-database-create" Nov 30 
07:05:04 crc kubenswrapper[4941]: E1130 07:05:04.524395 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1" containerName="glance-db-sync" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524403 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1" containerName="glance-db-sync" Nov 30 07:05:04 crc kubenswrapper[4941]: E1130 07:05:04.524422 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed5cfc02-cb0d-4b4c-a239-40f23890dbd3" containerName="mariadb-database-create" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524429 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed5cfc02-cb0d-4b4c-a239-40f23890dbd3" containerName="mariadb-database-create" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524633 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3664bd8-d8a8-4d42-b983-ee81649d4db2" containerName="mariadb-account-create-update" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524662 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="0851a1f1-5a9f-4609-b638-33da1fca0f01" containerName="mariadb-account-create-update" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524680 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="eace6e2e-dba9-4430-9383-0cbcb862d675" containerName="mariadb-account-create-update" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524703 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1" containerName="glance-db-sync" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524717 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f4d49ca-a8ee-4cd9-acd3-349c3c616627" containerName="mariadb-database-create" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524726 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="dee8aa58-2a9c-4597-9d06-629fa6e37648" containerName="mariadb-database-create" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.524740 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed5cfc02-cb0d-4b4c-a239-40f23890dbd3" containerName="mariadb-database-create" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.534677 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.545815 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6df78bdcfc-xkvlc"] Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.691467 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk4mm\" (UniqueName: \"kubernetes.io/projected/8c69e138-db57-4e44-b678-51304c47622e-kube-api-access-lk4mm\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.691529 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-dns-svc\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.691556 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-ovsdbserver-nb\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.692037 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-ovsdbserver-sb\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.692332 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-config\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.700386 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6df78bdcfc-xkvlc"] Nov 30 07:05:04 crc kubenswrapper[4941]: E1130 07:05:04.701079 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-lk4mm ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" podUID="8c69e138-db57-4e44-b678-51304c47622e" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.733253 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bfc9d5487-m7892"] Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.735956 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.737991 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.751348 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bfc9d5487-m7892"] Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.794367 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk4mm\" (UniqueName: \"kubernetes.io/projected/8c69e138-db57-4e44-b678-51304c47622e-kube-api-access-lk4mm\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.794444 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-dns-svc\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.794473 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-ovsdbserver-nb\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.794538 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-ovsdbserver-sb\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.794577 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-config\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.795780 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-config\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.795886 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-ovsdbserver-sb\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.795978 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-ovsdbserver-nb\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.796650 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-dns-svc\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.819677 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk4mm\" (UniqueName: \"kubernetes.io/projected/8c69e138-db57-4e44-b678-51304c47622e-kube-api-access-lk4mm\") pod \"dnsmasq-dns-6df78bdcfc-xkvlc\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.896492 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-dns-svc\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.896546 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.896576 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.896754 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-dns-swift-storage-0\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.896806 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8wff\" (UniqueName: \"kubernetes.io/projected/2130c20e-5282-4ffa-9a31-67a8ad718092-kube-api-access-l8wff\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.897008 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-config\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.999273 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-config\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.999440 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-dns-svc\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.999475 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.999501 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.999546 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-dns-swift-storage-0\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:04 crc kubenswrapper[4941]: I1130 07:05:04.999573 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8wff\" (UniqueName: \"kubernetes.io/projected/2130c20e-5282-4ffa-9a31-67a8ad718092-kube-api-access-l8wff\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.000741 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-ovsdbserver-nb\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.000821 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-ovsdbserver-sb\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.000827 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-dns-swift-storage-0\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.001310 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-config\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.001340 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-dns-svc\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.037769 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8wff\" (UniqueName: \"kubernetes.io/projected/2130c20e-5282-4ffa-9a31-67a8ad718092-kube-api-access-l8wff\") pod \"dnsmasq-dns-5bfc9d5487-m7892\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.050911 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.116714 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.167223 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.305717 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-config\") pod \"8c69e138-db57-4e44-b678-51304c47622e\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.306167 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-ovsdbserver-sb\") pod \"8c69e138-db57-4e44-b678-51304c47622e\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.306239 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-ovsdbserver-nb\") pod \"8c69e138-db57-4e44-b678-51304c47622e\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.306291 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk4mm\" (UniqueName: \"kubernetes.io/projected/8c69e138-db57-4e44-b678-51304c47622e-kube-api-access-lk4mm\") pod \"8c69e138-db57-4e44-b678-51304c47622e\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.306356 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-dns-svc\") pod \"8c69e138-db57-4e44-b678-51304c47622e\" (UID: \"8c69e138-db57-4e44-b678-51304c47622e\") " Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.307138 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8c69e138-db57-4e44-b678-51304c47622e" (UID: "8c69e138-db57-4e44-b678-51304c47622e"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.307482 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-config" (OuterVolumeSpecName: "config") pod "8c69e138-db57-4e44-b678-51304c47622e" (UID: "8c69e138-db57-4e44-b678-51304c47622e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.308062 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8c69e138-db57-4e44-b678-51304c47622e" (UID: "8c69e138-db57-4e44-b678-51304c47622e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.308167 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8c69e138-db57-4e44-b678-51304c47622e" (UID: "8c69e138-db57-4e44-b678-51304c47622e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.317477 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c69e138-db57-4e44-b678-51304c47622e-kube-api-access-lk4mm" (OuterVolumeSpecName: "kube-api-access-lk4mm") pod "8c69e138-db57-4e44-b678-51304c47622e" (UID: "8c69e138-db57-4e44-b678-51304c47622e"). InnerVolumeSpecName "kube-api-access-lk4mm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.408789 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.408821 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.408833 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.408842 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk4mm\" (UniqueName: \"kubernetes.io/projected/8c69e138-db57-4e44-b678-51304c47622e-kube-api-access-lk4mm\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:05 crc kubenswrapper[4941]: I1130 07:05:05.408851 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c69e138-db57-4e44-b678-51304c47622e-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:06 crc kubenswrapper[4941]: I1130 07:05:06.089189 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bfc9d5487-m7892"] Nov 30 07:05:06 crc kubenswrapper[4941]: I1130 07:05:06.160789 4941 generic.go:334] "Generic (PLEG): container finished" podID="d0ce952e-a290-4b54-b720-c34632cef479" containerID="a4e701153073d1d552b85184ac3a38256bd5f4d62dfd4daead28e74cf142a449" exitCode=0 Nov 30 
07:05:06 crc kubenswrapper[4941]: I1130 07:05:06.160851 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-prvmj" event={"ID":"d0ce952e-a290-4b54-b720-c34632cef479","Type":"ContainerDied","Data":"a4e701153073d1d552b85184ac3a38256bd5f4d62dfd4daead28e74cf142a449"} Nov 30 07:05:06 crc kubenswrapper[4941]: I1130 07:05:06.165811 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6df78bdcfc-xkvlc" Nov 30 07:05:06 crc kubenswrapper[4941]: I1130 07:05:06.166384 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" event={"ID":"2130c20e-5282-4ffa-9a31-67a8ad718092","Type":"ContainerStarted","Data":"629971c88027a38b84d99301ec5830af7e71b3b878e72d0c1ac496db9298771c"} Nov 30 07:05:06 crc kubenswrapper[4941]: I1130 07:05:06.382960 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6df78bdcfc-xkvlc"] Nov 30 07:05:06 crc kubenswrapper[4941]: I1130 07:05:06.393066 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6df78bdcfc-xkvlc"] Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.176751 4941 generic.go:334] "Generic (PLEG): container finished" podID="2130c20e-5282-4ffa-9a31-67a8ad718092" containerID="2d5bbf7a9b0c58b2b2d3ec8f5c6121337c777f929522c4f93d77eb8dc063db23" exitCode=0 Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.176876 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" event={"ID":"2130c20e-5282-4ffa-9a31-67a8ad718092","Type":"ContainerDied","Data":"2d5bbf7a9b0c58b2b2d3ec8f5c6121337c777f929522c4f93d77eb8dc063db23"} Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.534812 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c69e138-db57-4e44-b678-51304c47622e" path="/var/lib/kubelet/pods/8c69e138-db57-4e44-b678-51304c47622e/volumes" Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.537172 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-prvmj" Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.647382 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0ce952e-a290-4b54-b720-c34632cef479-combined-ca-bundle\") pod \"d0ce952e-a290-4b54-b720-c34632cef479\" (UID: \"d0ce952e-a290-4b54-b720-c34632cef479\") " Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.647543 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8pgw\" (UniqueName: \"kubernetes.io/projected/d0ce952e-a290-4b54-b720-c34632cef479-kube-api-access-g8pgw\") pod \"d0ce952e-a290-4b54-b720-c34632cef479\" (UID: \"d0ce952e-a290-4b54-b720-c34632cef479\") " Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.647584 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0ce952e-a290-4b54-b720-c34632cef479-config-data\") pod \"d0ce952e-a290-4b54-b720-c34632cef479\" (UID: \"d0ce952e-a290-4b54-b720-c34632cef479\") " Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.652555 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0ce952e-a290-4b54-b720-c34632cef479-kube-api-access-g8pgw" (OuterVolumeSpecName: "kube-api-access-g8pgw") pod "d0ce952e-a290-4b54-b720-c34632cef479" (UID: "d0ce952e-a290-4b54-b720-c34632cef479"). InnerVolumeSpecName "kube-api-access-g8pgw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.677486 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0ce952e-a290-4b54-b720-c34632cef479-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0ce952e-a290-4b54-b720-c34632cef479" (UID: "d0ce952e-a290-4b54-b720-c34632cef479"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.697567 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0ce952e-a290-4b54-b720-c34632cef479-config-data" (OuterVolumeSpecName: "config-data") pod "d0ce952e-a290-4b54-b720-c34632cef479" (UID: "d0ce952e-a290-4b54-b720-c34632cef479"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.749546 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0ce952e-a290-4b54-b720-c34632cef479-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.749812 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8pgw\" (UniqueName: \"kubernetes.io/projected/d0ce952e-a290-4b54-b720-c34632cef479-kube-api-access-g8pgw\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:07 crc kubenswrapper[4941]: I1130 07:05:07.749906 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0ce952e-a290-4b54-b720-c34632cef479-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.218117 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" event={"ID":"2130c20e-5282-4ffa-9a31-67a8ad718092","Type":"ContainerStarted","Data":"5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2"} Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.218409 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.227277 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-prvmj" event={"ID":"d0ce952e-a290-4b54-b720-c34632cef479","Type":"ContainerDied","Data":"ef9c1fb348173ba6a8b157e184931dbba046da4de4d9237fc38adae619d58c4b"} Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.227382 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef9c1fb348173ba6a8b157e184931dbba046da4de4d9237fc38adae619d58c4b" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.227402 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-prvmj" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.270263 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" podStartSLOduration=4.270214548 podStartE2EDuration="4.270214548s" podCreationTimestamp="2025-11-30 07:05:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:08.255881731 +0000 UTC m=+1129.024053340" watchObservedRunningTime="2025-11-30 07:05:08.270214548 +0000 UTC m=+1129.038386157" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.461394 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bfc9d5487-m7892"] Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.494394 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-65c6dfc787-8q8km"] Nov 30 07:05:08 crc kubenswrapper[4941]: E1130 07:05:08.495030 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0ce952e-a290-4b54-b720-c34632cef479" containerName="keystone-db-sync" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.495096 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0ce952e-a290-4b54-b720-c34632cef479" containerName="keystone-db-sync" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.495349 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0ce952e-a290-4b54-b720-c34632cef479" containerName="keystone-db-sync" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.496555 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.507809 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65c6dfc787-8q8km"] Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.567894 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-tpcw2"] Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.576313 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.581856 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ds5bd" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.582085 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.582218 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.582406 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.582658 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.593734 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tpcw2"] Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.677192 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-ovsdbserver-nb\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.677262 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-combined-ca-bundle\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.677310 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-config-data\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.678972 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-dns-swift-storage-0\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.679006 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-credential-keys\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.679030 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrkft\" (UniqueName: \"kubernetes.io/projected/6058fcb9-4495-466b-85ac-ac1f0f5ac333-kube-api-access-jrkft\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.679102 4941 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-fernet-keys\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.679122 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-ovsdbserver-sb\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.679169 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-scripts\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.679199 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-config\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.679229 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flpnt\" (UniqueName: \"kubernetes.io/projected/614f03ad-3871-4e75-82b0-a3dd8b622c37-kube-api-access-flpnt\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.679256 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-dns-svc\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.718134 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-5zxfl"] Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.721818 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.738234 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-9mc46" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.738681 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.738947 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.782716 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-fernet-keys\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.782780 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-ovsdbserver-sb\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.782823 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-scripts\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.782848 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-config\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.782871 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flpnt\" (UniqueName: \"kubernetes.io/projected/614f03ad-3871-4e75-82b0-a3dd8b622c37-kube-api-access-flpnt\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.782897 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-dns-svc\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.782930 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-ovsdbserver-nb\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.782954 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-combined-ca-bundle\") pod \"keystone-bootstrap-tpcw2\" (UID: 
\"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.782981 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-config-data\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.782999 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-dns-swift-storage-0\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.783016 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-credential-keys\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.783040 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrkft\" (UniqueName: \"kubernetes.io/projected/6058fcb9-4495-466b-85ac-ac1f0f5ac333-kube-api-access-jrkft\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.783837 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-config\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.784150 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-dns-svc\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.784820 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-ovsdbserver-nb\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.785241 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-5zxfl"] Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.787407 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-ovsdbserver-sb\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.787485 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-dns-swift-storage-0\") pod 
\"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.844841 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-combined-ca-bundle\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.844965 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-scripts\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.844987 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-config-data\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.845494 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-fernet-keys\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.848797 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-credential-keys\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.851948 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrkft\" (UniqueName: \"kubernetes.io/projected/6058fcb9-4495-466b-85ac-ac1f0f5ac333-kube-api-access-jrkft\") pod \"keystone-bootstrap-tpcw2\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.877447 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flpnt\" (UniqueName: \"kubernetes.io/projected/614f03ad-3871-4e75-82b0-a3dd8b622c37-kube-api-access-flpnt\") pod \"dnsmasq-dns-65c6dfc787-8q8km\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.884578 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-scripts\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.884659 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-db-sync-config-data\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc 
kubenswrapper[4941]: I1130 07:05:08.884742 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-combined-ca-bundle\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.884808 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-config-data\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.884840 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4thd\" (UniqueName: \"kubernetes.io/projected/2ee0724a-81f2-4b40-959f-e831f4d9abf5-kube-api-access-s4thd\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.884884 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ee0724a-81f2-4b40-959f-e831f4d9abf5-etc-machine-id\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.908991 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.936563 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-wtqm6"] Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.940053 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.944323 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-wtqm6"] Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.947994 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.969419 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-jlmlm"] Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.971081 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.976141 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-vd4td" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.976316 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.986628 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ee0724a-81f2-4b40-959f-e831f4d9abf5-etc-machine-id\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.986705 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-scripts\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.986781 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-db-sync-config-data\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.986859 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-combined-ca-bundle\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.986925 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-config-data\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.986953 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4thd\" (UniqueName: \"kubernetes.io/projected/2ee0724a-81f2-4b40-959f-e831f4d9abf5-kube-api-access-s4thd\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.987471 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ee0724a-81f2-4b40-959f-e831f4d9abf5-etc-machine-id\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.987903 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.988057 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 30 07:05:08 crc kubenswrapper[4941]: I1130 07:05:08.988161 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-pdqvq" Nov 30 07:05:08 crc kubenswrapper[4941]: 
I1130 07:05:08.994935 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jlmlm"] Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.010596 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-combined-ca-bundle\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.018050 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-db-sync-config-data\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.021764 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-scripts\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.023021 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-config-data\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.054935 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4thd\" (UniqueName: \"kubernetes.io/projected/2ee0724a-81f2-4b40-959f-e831f4d9abf5-kube-api-access-s4thd\") pod \"cinder-db-sync-5zxfl\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") " pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.058400 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-hbkr7"] Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.059757 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.066970 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.070199 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.090542 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-vm2xx" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.090721 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8th8\" (UniqueName: \"kubernetes.io/projected/bb8e782b-25d0-452c-a19a-7e43765f4200-kube-api-access-c8th8\") pod \"neutron-db-sync-wtqm6\" (UID: \"bb8e782b-25d0-452c-a19a-7e43765f4200\") " pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.090793 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-config-data\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.090845 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bb8e782b-25d0-452c-a19a-7e43765f4200-config\") pod \"neutron-db-sync-wtqm6\" (UID: \"bb8e782b-25d0-452c-a19a-7e43765f4200\") " pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.090867 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/414c3399-b5e5-4e28-a1ab-d646fa3193fe-logs\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.090896 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb8e782b-25d0-452c-a19a-7e43765f4200-combined-ca-bundle\") pod \"neutron-db-sync-wtqm6\" (UID: \"bb8e782b-25d0-452c-a19a-7e43765f4200\") " pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.090929 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-combined-ca-bundle\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.090950 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcxj4\" (UniqueName: \"kubernetes.io/projected/414c3399-b5e5-4e28-a1ab-d646fa3193fe-kube-api-access-jcxj4\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.090992 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-scripts\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.104707 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-hbkr7"] Nov 30 
07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.171046 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.178390 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65c6dfc787-8q8km"] Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.195216 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb8e782b-25d0-452c-a19a-7e43765f4200-combined-ca-bundle\") pod \"neutron-db-sync-wtqm6\" (UID: \"bb8e782b-25d0-452c-a19a-7e43765f4200\") " pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.195276 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-combined-ca-bundle\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.195298 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcxj4\" (UniqueName: \"kubernetes.io/projected/414c3399-b5e5-4e28-a1ab-d646fa3193fe-kube-api-access-jcxj4\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.195347 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e9c60f-d375-4332-ab45-93a3e96be457-combined-ca-bundle\") pod \"barbican-db-sync-hbkr7\" (UID: \"d4e9c60f-d375-4332-ab45-93a3e96be457\") " pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.195382 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d4e9c60f-d375-4332-ab45-93a3e96be457-db-sync-config-data\") pod \"barbican-db-sync-hbkr7\" (UID: \"d4e9c60f-d375-4332-ab45-93a3e96be457\") " pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.195427 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-scripts\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.195470 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8th8\" (UniqueName: \"kubernetes.io/projected/bb8e782b-25d0-452c-a19a-7e43765f4200-kube-api-access-c8th8\") pod \"neutron-db-sync-wtqm6\" (UID: \"bb8e782b-25d0-452c-a19a-7e43765f4200\") " pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.195507 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-config-data\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.195532 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fp8x\" (UniqueName: \"kubernetes.io/projected/d4e9c60f-d375-4332-ab45-93a3e96be457-kube-api-access-8fp8x\") pod \"barbican-db-sync-hbkr7\" (UID: \"d4e9c60f-d375-4332-ab45-93a3e96be457\") " pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.195578 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bb8e782b-25d0-452c-a19a-7e43765f4200-config\") pod \"neutron-db-sync-wtqm6\" (UID: \"bb8e782b-25d0-452c-a19a-7e43765f4200\") " pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.195597 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/414c3399-b5e5-4e28-a1ab-d646fa3193fe-logs\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.196017 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/414c3399-b5e5-4e28-a1ab-d646fa3193fe-logs\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.216346 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-scripts\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.219143 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb8e782b-25d0-452c-a19a-7e43765f4200-combined-ca-bundle\") pod \"neutron-db-sync-wtqm6\" (UID: \"bb8e782b-25d0-452c-a19a-7e43765f4200\") " pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.220894 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/bb8e782b-25d0-452c-a19a-7e43765f4200-config\") pod \"neutron-db-sync-wtqm6\" (UID: \"bb8e782b-25d0-452c-a19a-7e43765f4200\") " pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.233887 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f645789c-rtfz6"] Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.235459 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.240427 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-combined-ca-bundle\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.240989 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-config-data\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.264166 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8th8\" (UniqueName: \"kubernetes.io/projected/bb8e782b-25d0-452c-a19a-7e43765f4200-kube-api-access-c8th8\") pod \"neutron-db-sync-wtqm6\" (UID: \"bb8e782b-25d0-452c-a19a-7e43765f4200\") " pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.271556 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcxj4\" (UniqueName: \"kubernetes.io/projected/414c3399-b5e5-4e28-a1ab-d646fa3193fe-kube-api-access-jcxj4\") pod \"placement-db-sync-jlmlm\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.299315 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fp8x\" (UniqueName: \"kubernetes.io/projected/d4e9c60f-d375-4332-ab45-93a3e96be457-kube-api-access-8fp8x\") pod \"barbican-db-sync-hbkr7\" (UID: \"d4e9c60f-d375-4332-ab45-93a3e96be457\") " pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.299912 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e9c60f-d375-4332-ab45-93a3e96be457-combined-ca-bundle\") pod \"barbican-db-sync-hbkr7\" (UID: \"d4e9c60f-d375-4332-ab45-93a3e96be457\") " pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.299942 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d4e9c60f-d375-4332-ab45-93a3e96be457-db-sync-config-data\") pod \"barbican-db-sync-hbkr7\" (UID: \"d4e9c60f-d375-4332-ab45-93a3e96be457\") " pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.323419 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.328625 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d4e9c60f-d375-4332-ab45-93a3e96be457-db-sync-config-data\") pod \"barbican-db-sync-hbkr7\" (UID: \"d4e9c60f-d375-4332-ab45-93a3e96be457\") " pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.340616 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e9c60f-d375-4332-ab45-93a3e96be457-combined-ca-bundle\") pod \"barbican-db-sync-hbkr7\" (UID: \"d4e9c60f-d375-4332-ab45-93a3e96be457\") " pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.354169 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.366574 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f645789c-rtfz6"] Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.367533 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fp8x\" (UniqueName: \"kubernetes.io/projected/d4e9c60f-d375-4332-ab45-93a3e96be457-kube-api-access-8fp8x\") pod \"barbican-db-sync-hbkr7\" (UID: \"d4e9c60f-d375-4332-ab45-93a3e96be457\") " pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.405433 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.407839 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.412071 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zllb2\" (UniqueName: \"kubernetes.io/projected/d71115c3-7782-4396-8e3a-37cdacdb7802-kube-api-access-zllb2\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.412128 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-dns-swift-storage-0\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.412293 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-dns-svc\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.412353 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-config\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.412381 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-ovsdbserver-sb\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.412471 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-ovsdbserver-nb\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.413602 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.413805 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.437992 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.442896 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.552727 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-dns-svc\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.553160 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw8zc\" (UniqueName: \"kubernetes.io/projected/20e82100-3d40-4b16-be58-15e74c1aee65-kube-api-access-dw8zc\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.553227 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-config\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.553262 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-ovsdbserver-sb\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.553485 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.553533 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-ovsdbserver-nb\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.553618 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20e82100-3d40-4b16-be58-15e74c1aee65-run-httpd\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.553662 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zllb2\" (UniqueName: \"kubernetes.io/projected/d71115c3-7782-4396-8e3a-37cdacdb7802-kube-api-access-zllb2\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.553715 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-dns-swift-storage-0\") pod 
\"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.553866 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.553989 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-config-data\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.554047 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20e82100-3d40-4b16-be58-15e74c1aee65-log-httpd\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.554195 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-scripts\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.556046 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-ovsdbserver-nb\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.561843 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-dns-swift-storage-0\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.567919 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-config\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.568619 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-ovsdbserver-sb\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.579886 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-dns-svc\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.589313 4941 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zllb2\" (UniqueName: \"kubernetes.io/projected/d71115c3-7782-4396-8e3a-37cdacdb7802-kube-api-access-zllb2\") pod \"dnsmasq-dns-55f645789c-rtfz6\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") " pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.657688 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-config-data\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.659140 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20e82100-3d40-4b16-be58-15e74c1aee65-log-httpd\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.659275 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-scripts\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.659392 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dw8zc\" (UniqueName: \"kubernetes.io/projected/20e82100-3d40-4b16-be58-15e74c1aee65-kube-api-access-dw8zc\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.659505 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.659600 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20e82100-3d40-4b16-be58-15e74c1aee65-run-httpd\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.659748 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.663270 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.665973 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 
07:05:09.666923 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20e82100-3d40-4b16-be58-15e74c1aee65-run-httpd\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.667519 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20e82100-3d40-4b16-be58-15e74c1aee65-log-httpd\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.668184 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-config-data\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.680092 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.684053 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.687456 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dw8zc\" (UniqueName: \"kubernetes.io/projected/20e82100-3d40-4b16-be58-15e74c1aee65-kube-api-access-dw8zc\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.689181 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.691691 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.691913 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-vv8hz" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.692036 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.695287 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-scripts\") pod \"ceilometer-0\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.706263 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.800568 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.811211 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.813715 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.819564 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.819767 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.845621 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.853928 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.865202 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.865251 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.865290 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3422143-7872-4217-94e5-f1be044d7858-logs\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.865336 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-config-data\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.865360 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.865405 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b3422143-7872-4217-94e5-f1be044d7858-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.865429 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-scripts\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " 
pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.865455 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shcv4\" (UniqueName: \"kubernetes.io/projected/b3422143-7872-4217-94e5-f1be044d7858-kube-api-access-shcv4\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: W1130 07:05:09.882477 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6058fcb9_4495_466b_85ac_ac1f0f5ac333.slice/crio-510009f0c5665b98cee612e847ad45e7aaf290c8a866c66b65cc96d93c253bbc WatchSource:0}: Error finding container 510009f0c5665b98cee612e847ad45e7aaf290c8a866c66b65cc96d93c253bbc: Status 404 returned error can't find the container with id 510009f0c5665b98cee612e847ad45e7aaf290c8a866c66b65cc96d93c253bbc Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.885542 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tpcw2"] Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.967012 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.969155 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.968722 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.969200 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.969556 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.969693 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " 
pod="openstack/glance-default-internal-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.969716 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.969767 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b3422143-7872-4217-94e5-f1be044d7858-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.969813 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-scripts\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.969835 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkch7\" (UniqueName: \"kubernetes.io/projected/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-kube-api-access-dkch7\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.969957 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shcv4\" (UniqueName: \"kubernetes.io/projected/b3422143-7872-4217-94e5-f1be044d7858-kube-api-access-shcv4\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.970151 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.970172 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-logs\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.970271 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.970334 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") 
" pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.970432 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3422143-7872-4217-94e5-f1be044d7858-logs\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.970538 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-config-data\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.970918 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b3422143-7872-4217-94e5-f1be044d7858-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.974731 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-config-data\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.975011 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3422143-7872-4217-94e5-f1be044d7858-logs\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.975209 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-scripts\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.976532 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:09 crc kubenswrapper[4941]: I1130 07:05:09.982589 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.003251 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shcv4\" (UniqueName: \"kubernetes.io/projected/b3422143-7872-4217-94e5-f1be044d7858-kube-api-access-shcv4\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.018270 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.024908 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.072015 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.072055 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.072089 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkch7\" (UniqueName: \"kubernetes.io/projected/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-kube-api-access-dkch7\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.072154 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.072172 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-logs\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.072239 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.072260 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.072280 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc 
kubenswrapper[4941]: I1130 07:05:10.073359 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-logs\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.073625 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.074140 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.084589 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.084704 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.085219 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.086296 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.092429 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65c6dfc787-8q8km"] Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.110444 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkch7\" (UniqueName: \"kubernetes.io/projected/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-kube-api-access-dkch7\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.114842 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " 
pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.127081 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-5zxfl"] Nov 30 07:05:10 crc kubenswrapper[4941]: W1130 07:05:10.146617 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ee0724a_81f2_4b40_959f_e831f4d9abf5.slice/crio-90251ff78757c44348454b6c0bbdd3bb672228f7c90a57af88f41ff2dbe41077 WatchSource:0}: Error finding container 90251ff78757c44348454b6c0bbdd3bb672228f7c90a57af88f41ff2dbe41077: Status 404 returned error can't find the container with id 90251ff78757c44348454b6c0bbdd3bb672228f7c90a57af88f41ff2dbe41077 Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.149898 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.258883 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-hbkr7"] Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.269092 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jlmlm"] Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.275868 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-wtqm6"] Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.296124 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tpcw2" event={"ID":"6058fcb9-4495-466b-85ac-ac1f0f5ac333","Type":"ContainerStarted","Data":"6b905a1a1127e47f5289ba8b21eee56a00d5a168712f6b2da0f2d56730a1e613"} Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.296162 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tpcw2" event={"ID":"6058fcb9-4495-466b-85ac-ac1f0f5ac333","Type":"ContainerStarted","Data":"510009f0c5665b98cee612e847ad45e7aaf290c8a866c66b65cc96d93c253bbc"} Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.303543 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5zxfl" event={"ID":"2ee0724a-81f2-4b40-959f-e831f4d9abf5","Type":"ContainerStarted","Data":"90251ff78757c44348454b6c0bbdd3bb672228f7c90a57af88f41ff2dbe41077"} Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.320827 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" podUID="2130c20e-5282-4ffa-9a31-67a8ad718092" containerName="dnsmasq-dns" containerID="cri-o://5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2" gracePeriod=10 Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.320941 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" event={"ID":"614f03ad-3871-4e75-82b0-a3dd8b622c37","Type":"ContainerStarted","Data":"626d4ab7f992ad2a915468b70396f3bcca6cc1d6a28de0618da956570acd496d"} Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.328632 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-tpcw2" podStartSLOduration=2.328607959 podStartE2EDuration="2.328607959s" podCreationTimestamp="2025-11-30 07:05:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:10.317036059 +0000 UTC m=+1131.085207668" watchObservedRunningTime="2025-11-30 07:05:10.328607959 +0000 UTC 
m=+1131.096779568" Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.548961 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f645789c-rtfz6"] Nov 30 07:05:10 crc kubenswrapper[4941]: W1130 07:05:10.555868 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20e82100_3d40_4b16_be58_15e74c1aee65.slice/crio-1a7fb7fd578b4aae1d498f809f5473128080ec5630dc7c49196b8290ab5d178d WatchSource:0}: Error finding container 1a7fb7fd578b4aae1d498f809f5473128080ec5630dc7c49196b8290ab5d178d: Status 404 returned error can't find the container with id 1a7fb7fd578b4aae1d498f809f5473128080ec5630dc7c49196b8290ab5d178d Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.560722 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.816257 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.890500 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:05:10 crc kubenswrapper[4941]: I1130 07:05:10.976571 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.116818 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-config\") pod \"2130c20e-5282-4ffa-9a31-67a8ad718092\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.116978 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-ovsdbserver-sb\") pod \"2130c20e-5282-4ffa-9a31-67a8ad718092\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.117127 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8wff\" (UniqueName: \"kubernetes.io/projected/2130c20e-5282-4ffa-9a31-67a8ad718092-kube-api-access-l8wff\") pod \"2130c20e-5282-4ffa-9a31-67a8ad718092\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.117250 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-dns-svc\") pod \"2130c20e-5282-4ffa-9a31-67a8ad718092\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.117448 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-dns-swift-storage-0\") pod \"2130c20e-5282-4ffa-9a31-67a8ad718092\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.117670 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-ovsdbserver-nb\") pod \"2130c20e-5282-4ffa-9a31-67a8ad718092\" (UID: \"2130c20e-5282-4ffa-9a31-67a8ad718092\") " Nov 30 07:05:11 crc kubenswrapper[4941]: 
I1130 07:05:11.128409 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2130c20e-5282-4ffa-9a31-67a8ad718092-kube-api-access-l8wff" (OuterVolumeSpecName: "kube-api-access-l8wff") pod "2130c20e-5282-4ffa-9a31-67a8ad718092" (UID: "2130c20e-5282-4ffa-9a31-67a8ad718092"). InnerVolumeSpecName "kube-api-access-l8wff". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.176643 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2130c20e-5282-4ffa-9a31-67a8ad718092" (UID: "2130c20e-5282-4ffa-9a31-67a8ad718092"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.185041 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-config" (OuterVolumeSpecName: "config") pod "2130c20e-5282-4ffa-9a31-67a8ad718092" (UID: "2130c20e-5282-4ffa-9a31-67a8ad718092"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.194702 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2130c20e-5282-4ffa-9a31-67a8ad718092" (UID: "2130c20e-5282-4ffa-9a31-67a8ad718092"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.206401 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2130c20e-5282-4ffa-9a31-67a8ad718092" (UID: "2130c20e-5282-4ffa-9a31-67a8ad718092"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.211111 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2130c20e-5282-4ffa-9a31-67a8ad718092" (UID: "2130c20e-5282-4ffa-9a31-67a8ad718092"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.220221 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.220259 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.220269 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.220279 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8wff\" (UniqueName: \"kubernetes.io/projected/2130c20e-5282-4ffa-9a31-67a8ad718092-kube-api-access-l8wff\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.220289 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.220297 4941 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2130c20e-5282-4ffa-9a31-67a8ad718092-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.337150 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b3422143-7872-4217-94e5-f1be044d7858","Type":"ContainerStarted","Data":"19fc60a826246e65dd97c3184f70ac78e9e48da5617a55bac05254854b9c9d85"} Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.343414 4941 generic.go:334] "Generic (PLEG): container finished" podID="d71115c3-7782-4396-8e3a-37cdacdb7802" containerID="d8144984463211b224e6bafdd6518e9366676352c73717b35407f531e2e1d134" exitCode=0 Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.343497 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f645789c-rtfz6" event={"ID":"d71115c3-7782-4396-8e3a-37cdacdb7802","Type":"ContainerDied","Data":"d8144984463211b224e6bafdd6518e9366676352c73717b35407f531e2e1d134"} Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.343546 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f645789c-rtfz6" event={"ID":"d71115c3-7782-4396-8e3a-37cdacdb7802","Type":"ContainerStarted","Data":"fca3450d3c0b5974cbe676aae0dc687abf0419b76a97f62308c1da14fe079da6"} Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.396222 4941 generic.go:334] "Generic (PLEG): container finished" podID="614f03ad-3871-4e75-82b0-a3dd8b622c37" containerID="3fe9ec886f48b70f9f865bb89d7151b2d7bbd680d5532d5ec513a5ae80226fbd" exitCode=0 Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.397523 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" event={"ID":"614f03ad-3871-4e75-82b0-a3dd8b622c37","Type":"ContainerDied","Data":"3fe9ec886f48b70f9f865bb89d7151b2d7bbd680d5532d5ec513a5ae80226fbd"} Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.439200 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"20e82100-3d40-4b16-be58-15e74c1aee65","Type":"ContainerStarted","Data":"1a7fb7fd578b4aae1d498f809f5473128080ec5630dc7c49196b8290ab5d178d"} Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.449494 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jlmlm" event={"ID":"414c3399-b5e5-4e28-a1ab-d646fa3193fe","Type":"ContainerStarted","Data":"f4969fc490158f196b79a4ce075438ed0887a20c730a74dc3de3f61df42f2df2"} Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.452350 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec4d86fb-c4d0-4586-b3d1-fae1797073dd","Type":"ContainerStarted","Data":"f6e86f57f5c913cc04a412609e143cf16c5ccc8bd6ac8c0b5135178ed4e3c77f"} Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.454376 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-hbkr7" event={"ID":"d4e9c60f-d375-4332-ab45-93a3e96be457","Type":"ContainerStarted","Data":"a5009db6ed155c27a228030cd71eb40fdd62416f5c5fe944c6de784d3329c053"} Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.455905 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wtqm6" event={"ID":"bb8e782b-25d0-452c-a19a-7e43765f4200","Type":"ContainerStarted","Data":"d1444176c7f62858717bcdb5f8821c6cf482c99c0b1bac70adccf0b14c0cde7c"} Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.455930 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wtqm6" event={"ID":"bb8e782b-25d0-452c-a19a-7e43765f4200","Type":"ContainerStarted","Data":"e781b1c1b03ca0965534a4e5a2371be77dda84658271438895b09cfaf2f117a6"} Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.459646 4941 generic.go:334] "Generic (PLEG): container finished" podID="2130c20e-5282-4ffa-9a31-67a8ad718092" containerID="5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2" exitCode=0 Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.460170 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.464746 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" event={"ID":"2130c20e-5282-4ffa-9a31-67a8ad718092","Type":"ContainerDied","Data":"5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2"} Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.464798 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bfc9d5487-m7892" event={"ID":"2130c20e-5282-4ffa-9a31-67a8ad718092","Type":"ContainerDied","Data":"629971c88027a38b84d99301ec5830af7e71b3b878e72d0c1ac496db9298771c"} Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.464817 4941 scope.go:117] "RemoveContainer" containerID="5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.503224 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-wtqm6" podStartSLOduration=3.503199817 podStartE2EDuration="3.503199817s" podCreationTimestamp="2025-11-30 07:05:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:11.468916908 +0000 UTC m=+1132.237088517" watchObservedRunningTime="2025-11-30 07:05:11.503199817 +0000 UTC m=+1132.271371446" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.599900 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bfc9d5487-m7892"] Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.605942 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bfc9d5487-m7892"] Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.606661 4941 scope.go:117] "RemoveContainer" containerID="2d5bbf7a9b0c58b2b2d3ec8f5c6121337c777f929522c4f93d77eb8dc063db23" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.764881 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.851731 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.894283 4941 scope.go:117] "RemoveContainer" containerID="5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.894603 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:05:11 crc kubenswrapper[4941]: E1130 07:05:11.898580 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2\": container with ID starting with 5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2 not found: ID does not exist" containerID="5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.898621 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2"} err="failed to get container status \"5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2\": rpc error: code = NotFound desc = could not find container \"5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2\": container with ID 
starting with 5a2cbe078d9ed447b7e83228fd89ad98302780ccbfc1a273129fb54d4d3d9ab2 not found: ID does not exist" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.898650 4941 scope.go:117] "RemoveContainer" containerID="2d5bbf7a9b0c58b2b2d3ec8f5c6121337c777f929522c4f93d77eb8dc063db23" Nov 30 07:05:11 crc kubenswrapper[4941]: E1130 07:05:11.906544 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d5bbf7a9b0c58b2b2d3ec8f5c6121337c777f929522c4f93d77eb8dc063db23\": container with ID starting with 2d5bbf7a9b0c58b2b2d3ec8f5c6121337c777f929522c4f93d77eb8dc063db23 not found: ID does not exist" containerID="2d5bbf7a9b0c58b2b2d3ec8f5c6121337c777f929522c4f93d77eb8dc063db23" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.906610 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d5bbf7a9b0c58b2b2d3ec8f5c6121337c777f929522c4f93d77eb8dc063db23"} err="failed to get container status \"2d5bbf7a9b0c58b2b2d3ec8f5c6121337c777f929522c4f93d77eb8dc063db23\": rpc error: code = NotFound desc = could not find container \"2d5bbf7a9b0c58b2b2d3ec8f5c6121337c777f929522c4f93d77eb8dc063db23\": container with ID starting with 2d5bbf7a9b0c58b2b2d3ec8f5c6121337c777f929522c4f93d77eb8dc063db23 not found: ID does not exist" Nov 30 07:05:11 crc kubenswrapper[4941]: I1130 07:05:11.952179 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.052400 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-dns-swift-storage-0\") pod \"614f03ad-3871-4e75-82b0-a3dd8b622c37\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.052463 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-ovsdbserver-nb\") pod \"614f03ad-3871-4e75-82b0-a3dd8b622c37\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.052526 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-dns-svc\") pod \"614f03ad-3871-4e75-82b0-a3dd8b622c37\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.052545 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-config\") pod \"614f03ad-3871-4e75-82b0-a3dd8b622c37\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.052672 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flpnt\" (UniqueName: \"kubernetes.io/projected/614f03ad-3871-4e75-82b0-a3dd8b622c37-kube-api-access-flpnt\") pod \"614f03ad-3871-4e75-82b0-a3dd8b622c37\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.052771 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-ovsdbserver-sb\") pod \"614f03ad-3871-4e75-82b0-a3dd8b622c37\" (UID: \"614f03ad-3871-4e75-82b0-a3dd8b622c37\") " Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.066671 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/614f03ad-3871-4e75-82b0-a3dd8b622c37-kube-api-access-flpnt" (OuterVolumeSpecName: "kube-api-access-flpnt") pod "614f03ad-3871-4e75-82b0-a3dd8b622c37" (UID: "614f03ad-3871-4e75-82b0-a3dd8b622c37"). InnerVolumeSpecName "kube-api-access-flpnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.103254 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "614f03ad-3871-4e75-82b0-a3dd8b622c37" (UID: "614f03ad-3871-4e75-82b0-a3dd8b622c37"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.121245 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "614f03ad-3871-4e75-82b0-a3dd8b622c37" (UID: "614f03ad-3871-4e75-82b0-a3dd8b622c37"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.154836 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-config" (OuterVolumeSpecName: "config") pod "614f03ad-3871-4e75-82b0-a3dd8b622c37" (UID: "614f03ad-3871-4e75-82b0-a3dd8b622c37"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.158017 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.158048 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.158058 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.158067 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flpnt\" (UniqueName: \"kubernetes.io/projected/614f03ad-3871-4e75-82b0-a3dd8b622c37-kube-api-access-flpnt\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.227837 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "614f03ad-3871-4e75-82b0-a3dd8b622c37" (UID: "614f03ad-3871-4e75-82b0-a3dd8b622c37"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.260527 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.303560 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "614f03ad-3871-4e75-82b0-a3dd8b622c37" (UID: "614f03ad-3871-4e75-82b0-a3dd8b622c37"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.361922 4941 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/614f03ad-3871-4e75-82b0-a3dd8b622c37-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.492495 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.492551 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65c6dfc787-8q8km" event={"ID":"614f03ad-3871-4e75-82b0-a3dd8b622c37","Type":"ContainerDied","Data":"626d4ab7f992ad2a915468b70396f3bcca6cc1d6a28de0618da956570acd496d"} Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.492625 4941 scope.go:117] "RemoveContainer" containerID="3fe9ec886f48b70f9f865bb89d7151b2d7bbd680d5532d5ec513a5ae80226fbd" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.503098 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec4d86fb-c4d0-4586-b3d1-fae1797073dd","Type":"ContainerStarted","Data":"2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6"} Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.506580 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b3422143-7872-4217-94e5-f1be044d7858","Type":"ContainerStarted","Data":"d17b55a5118094b06219a0a789efc9065c20e6751b586a64858bac3a43221e13"} Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.512891 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f645789c-rtfz6" event={"ID":"d71115c3-7782-4396-8e3a-37cdacdb7802","Type":"ContainerStarted","Data":"267ed5a6dd6ec77d04a9c5ce03a3cd0327ea124fdba4126b565c34b34b0d6014"} Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.512966 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.536256 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f645789c-rtfz6" podStartSLOduration=3.536240883 podStartE2EDuration="3.536240883s" podCreationTimestamp="2025-11-30 07:05:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:12.530721491 +0000 UTC m=+1133.298893100" watchObservedRunningTime="2025-11-30 07:05:12.536240883 +0000 UTC m=+1133.304412492" Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.647772 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-65c6dfc787-8q8km"] Nov 30 07:05:12 crc kubenswrapper[4941]: I1130 07:05:12.666396 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-65c6dfc787-8q8km"] Nov 30 07:05:13 crc kubenswrapper[4941]: I1130 07:05:13.536389 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2130c20e-5282-4ffa-9a31-67a8ad718092" path="/var/lib/kubelet/pods/2130c20e-5282-4ffa-9a31-67a8ad718092/volumes" Nov 30 07:05:13 crc kubenswrapper[4941]: I1130 07:05:13.538084 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="614f03ad-3871-4e75-82b0-a3dd8b622c37" path="/var/lib/kubelet/pods/614f03ad-3871-4e75-82b0-a3dd8b622c37/volumes" Nov 30 07:05:13 crc kubenswrapper[4941]: I1130 07:05:13.540803 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ec4d86fb-c4d0-4586-b3d1-fae1797073dd" containerName="glance-log" containerID="cri-o://2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6" gracePeriod=30 Nov 30 07:05:13 crc kubenswrapper[4941]: I1130 07:05:13.540948 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec4d86fb-c4d0-4586-b3d1-fae1797073dd","Type":"ContainerStarted","Data":"e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39"} Nov 30 07:05:13 crc kubenswrapper[4941]: I1130 07:05:13.541007 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ec4d86fb-c4d0-4586-b3d1-fae1797073dd" containerName="glance-httpd" containerID="cri-o://e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39" gracePeriod=30 Nov 30 07:05:13 crc kubenswrapper[4941]: I1130 07:05:13.572254 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.57223758 podStartE2EDuration="5.57223758s" podCreationTimestamp="2025-11-30 07:05:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:13.570452244 +0000 UTC m=+1134.338623853" watchObservedRunningTime="2025-11-30 07:05:13.57223758 +0000 UTC m=+1134.340409189" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.285413 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.422232 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-scripts\") pod \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.422347 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-httpd-run\") pod \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.422435 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.422485 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-internal-tls-certs\") pod \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.422620 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-config-data\") pod \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.422657 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkch7\" (UniqueName: \"kubernetes.io/projected/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-kube-api-access-dkch7\") pod \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.422673 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-logs\") pod \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.422731 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-combined-ca-bundle\") pod \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\" (UID: \"ec4d86fb-c4d0-4586-b3d1-fae1797073dd\") " Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.425602 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-logs" (OuterVolumeSpecName: "logs") pod "ec4d86fb-c4d0-4586-b3d1-fae1797073dd" (UID: "ec4d86fb-c4d0-4586-b3d1-fae1797073dd"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.425703 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ec4d86fb-c4d0-4586-b3d1-fae1797073dd" (UID: "ec4d86fb-c4d0-4586-b3d1-fae1797073dd"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.432226 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "ec4d86fb-c4d0-4586-b3d1-fae1797073dd" (UID: "ec4d86fb-c4d0-4586-b3d1-fae1797073dd"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.432250 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-kube-api-access-dkch7" (OuterVolumeSpecName: "kube-api-access-dkch7") pod "ec4d86fb-c4d0-4586-b3d1-fae1797073dd" (UID: "ec4d86fb-c4d0-4586-b3d1-fae1797073dd"). InnerVolumeSpecName "kube-api-access-dkch7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.443609 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-scripts" (OuterVolumeSpecName: "scripts") pod "ec4d86fb-c4d0-4586-b3d1-fae1797073dd" (UID: "ec4d86fb-c4d0-4586-b3d1-fae1797073dd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.467691 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec4d86fb-c4d0-4586-b3d1-fae1797073dd" (UID: "ec4d86fb-c4d0-4586-b3d1-fae1797073dd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.481530 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ec4d86fb-c4d0-4586-b3d1-fae1797073dd" (UID: "ec4d86fb-c4d0-4586-b3d1-fae1797073dd"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.494509 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-config-data" (OuterVolumeSpecName: "config-data") pod "ec4d86fb-c4d0-4586-b3d1-fae1797073dd" (UID: "ec4d86fb-c4d0-4586-b3d1-fae1797073dd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.526421 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.526456 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkch7\" (UniqueName: \"kubernetes.io/projected/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-kube-api-access-dkch7\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.526468 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.526476 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.526486 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.526494 4941 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.526531 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.526541 4941 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec4d86fb-c4d0-4586-b3d1-fae1797073dd-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.545545 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.559025 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b3422143-7872-4217-94e5-f1be044d7858","Type":"ContainerStarted","Data":"f1537bff15cdfd93022aff7232d986b90b1516b9cdd37a6578cb802e96242d55"} Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.559190 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b3422143-7872-4217-94e5-f1be044d7858" containerName="glance-log" containerID="cri-o://d17b55a5118094b06219a0a789efc9065c20e6751b586a64858bac3a43221e13" gracePeriod=30 Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.559585 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b3422143-7872-4217-94e5-f1be044d7858" containerName="glance-httpd" containerID="cri-o://f1537bff15cdfd93022aff7232d986b90b1516b9cdd37a6578cb802e96242d55" gracePeriod=30 Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.564813 4941 generic.go:334] "Generic (PLEG): container finished" 
podID="ec4d86fb-c4d0-4586-b3d1-fae1797073dd" containerID="e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39" exitCode=0 Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.564852 4941 generic.go:334] "Generic (PLEG): container finished" podID="ec4d86fb-c4d0-4586-b3d1-fae1797073dd" containerID="2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6" exitCode=143 Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.564917 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.564959 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec4d86fb-c4d0-4586-b3d1-fae1797073dd","Type":"ContainerDied","Data":"e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39"} Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.564985 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec4d86fb-c4d0-4586-b3d1-fae1797073dd","Type":"ContainerDied","Data":"2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6"} Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.564995 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ec4d86fb-c4d0-4586-b3d1-fae1797073dd","Type":"ContainerDied","Data":"f6e86f57f5c913cc04a412609e143cf16c5ccc8bd6ac8c0b5135178ed4e3c77f"} Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.565013 4941 scope.go:117] "RemoveContainer" containerID="e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.583506 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.583462506 podStartE2EDuration="6.583462506s" podCreationTimestamp="2025-11-30 07:05:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:14.580561786 +0000 UTC m=+1135.348733395" watchObservedRunningTime="2025-11-30 07:05:14.583462506 +0000 UTC m=+1135.351634115" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.628506 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.632615 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.643605 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.650740 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:05:14 crc kubenswrapper[4941]: E1130 07:05:14.651077 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec4d86fb-c4d0-4586-b3d1-fae1797073dd" containerName="glance-httpd" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.651090 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec4d86fb-c4d0-4586-b3d1-fae1797073dd" containerName="glance-httpd" Nov 30 07:05:14 crc kubenswrapper[4941]: E1130 07:05:14.651106 4941 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="2130c20e-5282-4ffa-9a31-67a8ad718092" containerName="init" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.651112 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2130c20e-5282-4ffa-9a31-67a8ad718092" containerName="init" Nov 30 07:05:14 crc kubenswrapper[4941]: E1130 07:05:14.651123 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec4d86fb-c4d0-4586-b3d1-fae1797073dd" containerName="glance-log" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.651130 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec4d86fb-c4d0-4586-b3d1-fae1797073dd" containerName="glance-log" Nov 30 07:05:14 crc kubenswrapper[4941]: E1130 07:05:14.651151 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2130c20e-5282-4ffa-9a31-67a8ad718092" containerName="dnsmasq-dns" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.651156 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2130c20e-5282-4ffa-9a31-67a8ad718092" containerName="dnsmasq-dns" Nov 30 07:05:14 crc kubenswrapper[4941]: E1130 07:05:14.651163 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="614f03ad-3871-4e75-82b0-a3dd8b622c37" containerName="init" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.651168 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="614f03ad-3871-4e75-82b0-a3dd8b622c37" containerName="init" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.651318 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="614f03ad-3871-4e75-82b0-a3dd8b622c37" containerName="init" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.651348 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2130c20e-5282-4ffa-9a31-67a8ad718092" containerName="dnsmasq-dns" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.651362 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec4d86fb-c4d0-4586-b3d1-fae1797073dd" containerName="glance-log" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.651368 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec4d86fb-c4d0-4586-b3d1-fae1797073dd" containerName="glance-httpd" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.652244 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.655901 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.656153 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.683228 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.732097 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54bxv\" (UniqueName: \"kubernetes.io/projected/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-kube-api-access-54bxv\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.732164 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.732189 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.732947 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.733031 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.733217 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.733243 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.733387 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-logs\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.835817 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.836995 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.837290 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.837462 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.837678 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.837873 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-logs\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.838032 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54bxv\" (UniqueName: \"kubernetes.io/projected/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-kube-api-access-54bxv\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.838076 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.838107 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.838472 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.838897 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-logs\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.844055 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.844483 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.849106 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.856125 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.857617 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54bxv\" (UniqueName: \"kubernetes.io/projected/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-kube-api-access-54bxv\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.897618 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:05:14 crc kubenswrapper[4941]: I1130 07:05:14.982575 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:15 crc kubenswrapper[4941]: I1130 07:05:15.535971 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec4d86fb-c4d0-4586-b3d1-fae1797073dd" path="/var/lib/kubelet/pods/ec4d86fb-c4d0-4586-b3d1-fae1797073dd/volumes" Nov 30 07:05:15 crc kubenswrapper[4941]: I1130 07:05:15.591559 4941 generic.go:334] "Generic (PLEG): container finished" podID="6058fcb9-4495-466b-85ac-ac1f0f5ac333" containerID="6b905a1a1127e47f5289ba8b21eee56a00d5a168712f6b2da0f2d56730a1e613" exitCode=0 Nov 30 07:05:15 crc kubenswrapper[4941]: I1130 07:05:15.591633 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tpcw2" event={"ID":"6058fcb9-4495-466b-85ac-ac1f0f5ac333","Type":"ContainerDied","Data":"6b905a1a1127e47f5289ba8b21eee56a00d5a168712f6b2da0f2d56730a1e613"} Nov 30 07:05:15 crc kubenswrapper[4941]: I1130 07:05:15.596180 4941 generic.go:334] "Generic (PLEG): container finished" podID="b3422143-7872-4217-94e5-f1be044d7858" containerID="f1537bff15cdfd93022aff7232d986b90b1516b9cdd37a6578cb802e96242d55" exitCode=0 Nov 30 07:05:15 crc kubenswrapper[4941]: I1130 07:05:15.596207 4941 generic.go:334] "Generic (PLEG): container finished" podID="b3422143-7872-4217-94e5-f1be044d7858" containerID="d17b55a5118094b06219a0a789efc9065c20e6751b586a64858bac3a43221e13" exitCode=143 Nov 30 07:05:15 crc kubenswrapper[4941]: I1130 07:05:15.596224 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b3422143-7872-4217-94e5-f1be044d7858","Type":"ContainerDied","Data":"f1537bff15cdfd93022aff7232d986b90b1516b9cdd37a6578cb802e96242d55"} Nov 30 07:05:15 crc kubenswrapper[4941]: I1130 07:05:15.596244 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b3422143-7872-4217-94e5-f1be044d7858","Type":"ContainerDied","Data":"d17b55a5118094b06219a0a789efc9065c20e6751b586a64858bac3a43221e13"} Nov 30 07:05:19 crc kubenswrapper[4941]: I1130 07:05:19.802558 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f645789c-rtfz6" Nov 30 07:05:19 crc kubenswrapper[4941]: I1130 07:05:19.874154 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f6d79597f-wg6p7"] Nov 30 07:05:19 crc kubenswrapper[4941]: I1130 07:05:19.874446 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" podUID="f2064ce3-0785-4fa0-913d-c24f55138a87" containerName="dnsmasq-dns" containerID="cri-o://cfa64cfa6ce7ea072fbcdd1db81afa8d60832fe8d0ea99c2a0036c7e0e231ce5" gracePeriod=10 Nov 30 07:05:20 crc kubenswrapper[4941]: I1130 07:05:20.657494 4941 generic.go:334] "Generic (PLEG): container finished" podID="f2064ce3-0785-4fa0-913d-c24f55138a87" containerID="cfa64cfa6ce7ea072fbcdd1db81afa8d60832fe8d0ea99c2a0036c7e0e231ce5" exitCode=0 Nov 30 07:05:20 crc kubenswrapper[4941]: I1130 07:05:20.657692 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" event={"ID":"f2064ce3-0785-4fa0-913d-c24f55138a87","Type":"ContainerDied","Data":"cfa64cfa6ce7ea072fbcdd1db81afa8d60832fe8d0ea99c2a0036c7e0e231ce5"} Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.160131 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.226577 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-credential-keys\") pod \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.226989 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-fernet-keys\") pod \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.227810 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-config-data\") pod \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.227898 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-scripts\") pod \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.227956 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-combined-ca-bundle\") pod \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.228156 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrkft\" (UniqueName: \"kubernetes.io/projected/6058fcb9-4495-466b-85ac-ac1f0f5ac333-kube-api-access-jrkft\") pod \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\" (UID: \"6058fcb9-4495-466b-85ac-ac1f0f5ac333\") " Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.234288 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "6058fcb9-4495-466b-85ac-ac1f0f5ac333" (UID: "6058fcb9-4495-466b-85ac-ac1f0f5ac333"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.245513 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-scripts" (OuterVolumeSpecName: "scripts") pod "6058fcb9-4495-466b-85ac-ac1f0f5ac333" (UID: "6058fcb9-4495-466b-85ac-ac1f0f5ac333"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.245842 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6058fcb9-4495-466b-85ac-ac1f0f5ac333-kube-api-access-jrkft" (OuterVolumeSpecName: "kube-api-access-jrkft") pod "6058fcb9-4495-466b-85ac-ac1f0f5ac333" (UID: "6058fcb9-4495-466b-85ac-ac1f0f5ac333"). InnerVolumeSpecName "kube-api-access-jrkft". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.266866 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "6058fcb9-4495-466b-85ac-ac1f0f5ac333" (UID: "6058fcb9-4495-466b-85ac-ac1f0f5ac333"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.272808 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-config-data" (OuterVolumeSpecName: "config-data") pod "6058fcb9-4495-466b-85ac-ac1f0f5ac333" (UID: "6058fcb9-4495-466b-85ac-ac1f0f5ac333"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.272902 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6058fcb9-4495-466b-85ac-ac1f0f5ac333" (UID: "6058fcb9-4495-466b-85ac-ac1f0f5ac333"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.330369 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrkft\" (UniqueName: \"kubernetes.io/projected/6058fcb9-4495-466b-85ac-ac1f0f5ac333-kube-api-access-jrkft\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.330403 4941 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.330413 4941 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.330421 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.330431 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.330440 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6058fcb9-4495-466b-85ac-ac1f0f5ac333-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.690871 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tpcw2" event={"ID":"6058fcb9-4495-466b-85ac-ac1f0f5ac333","Type":"ContainerDied","Data":"510009f0c5665b98cee612e847ad45e7aaf290c8a866c66b65cc96d93c253bbc"} Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.690908 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="510009f0c5665b98cee612e847ad45e7aaf290c8a866c66b65cc96d93c253bbc" Nov 30 07:05:23 crc kubenswrapper[4941]: I1130 07:05:23.690930 4941 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tpcw2" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.247653 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-tpcw2"] Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.254930 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-tpcw2"] Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.337683 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-wj8qz"] Nov 30 07:05:24 crc kubenswrapper[4941]: E1130 07:05:24.338252 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6058fcb9-4495-466b-85ac-ac1f0f5ac333" containerName="keystone-bootstrap" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.338275 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="6058fcb9-4495-466b-85ac-ac1f0f5ac333" containerName="keystone-bootstrap" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.338526 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="6058fcb9-4495-466b-85ac-ac1f0f5ac333" containerName="keystone-bootstrap" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.342451 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.344868 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.345768 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.346112 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.351151 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ds5bd" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.351515 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.367526 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-wj8qz"] Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.455672 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-combined-ca-bundle\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.455730 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-credential-keys\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.456404 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-config-data\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 
crc kubenswrapper[4941]: I1130 07:05:24.456588 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-scripts\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.456625 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm5bp\" (UniqueName: \"kubernetes.io/projected/727089d5-851d-4e5e-80ed-f09e770f1fe2-kube-api-access-sm5bp\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.456766 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-fernet-keys\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.558392 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-scripts\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.559178 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm5bp\" (UniqueName: \"kubernetes.io/projected/727089d5-851d-4e5e-80ed-f09e770f1fe2-kube-api-access-sm5bp\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.559247 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-fernet-keys\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.559351 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-combined-ca-bundle\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.559375 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-credential-keys\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.559584 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-config-data\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.564414 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-scripts\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.565201 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-credential-keys\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.565484 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-fernet-keys\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.566513 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-combined-ca-bundle\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.567259 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-config-data\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.586763 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm5bp\" (UniqueName: \"kubernetes.io/projected/727089d5-851d-4e5e-80ed-f09e770f1fe2-kube-api-access-sm5bp\") pod \"keystone-bootstrap-wj8qz\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:24 crc kubenswrapper[4941]: I1130 07:05:24.666714 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:25 crc kubenswrapper[4941]: I1130 07:05:25.539114 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6058fcb9-4495-466b-85ac-ac1f0f5ac333" path="/var/lib/kubelet/pods/6058fcb9-4495-466b-85ac-ac1f0f5ac333/volumes" Nov 30 07:05:29 crc kubenswrapper[4941]: I1130 07:05:29.228712 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" podUID="f2064ce3-0785-4fa0-913d-c24f55138a87" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: i/o timeout" Nov 30 07:05:30 crc kubenswrapper[4941]: I1130 07:05:30.777807 4941 generic.go:334] "Generic (PLEG): container finished" podID="bb8e782b-25d0-452c-a19a-7e43765f4200" containerID="d1444176c7f62858717bcdb5f8821c6cf482c99c0b1bac70adccf0b14c0cde7c" exitCode=0 Nov 30 07:05:30 crc kubenswrapper[4941]: I1130 07:05:30.777897 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wtqm6" event={"ID":"bb8e782b-25d0-452c-a19a-7e43765f4200","Type":"ContainerDied","Data":"d1444176c7f62858717bcdb5f8821c6cf482c99c0b1bac70adccf0b14c0cde7c"} Nov 30 07:05:32 crc kubenswrapper[4941]: E1130 07:05:32.202492 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:2051e26a441f1ce22aeb8daa0137559d89bded994db8141c11dd580ae6d07a23" Nov 30 07:05:32 crc kubenswrapper[4941]: E1130 07:05:32.202920 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:2051e26a441f1ce22aeb8daa0137559d89bded994db8141c11dd580ae6d07a23,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n6dh64bh548h56fh668hf6h679h66dh64ch5fch54ch644h66h547h66ch65fh5f9h59h5d5hd4h5f9h6ch54h566h659h574h5bh5b6h77h68dh5b9h687q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dw8zc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 
/var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(20e82100-3d40-4b16-be58-15e74c1aee65): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.203370 4941 scope.go:117] "RemoveContainer" containerID="2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.322263 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.325893 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428055 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-ovsdbserver-nb\") pod \"f2064ce3-0785-4fa0-913d-c24f55138a87\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428109 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"b3422143-7872-4217-94e5-f1be044d7858\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428155 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-combined-ca-bundle\") pod \"b3422143-7872-4217-94e5-f1be044d7858\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428187 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3422143-7872-4217-94e5-f1be044d7858-logs\") pod \"b3422143-7872-4217-94e5-f1be044d7858\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428219 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-scripts\") pod \"b3422143-7872-4217-94e5-f1be044d7858\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428239 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-config\") pod \"f2064ce3-0785-4fa0-913d-c24f55138a87\" (UID: 
\"f2064ce3-0785-4fa0-913d-c24f55138a87\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428309 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-public-tls-certs\") pod \"b3422143-7872-4217-94e5-f1be044d7858\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428436 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-config-data\") pod \"b3422143-7872-4217-94e5-f1be044d7858\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428492 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-dns-svc\") pod \"f2064ce3-0785-4fa0-913d-c24f55138a87\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428598 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b3422143-7872-4217-94e5-f1be044d7858-httpd-run\") pod \"b3422143-7872-4217-94e5-f1be044d7858\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428622 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-ovsdbserver-sb\") pod \"f2064ce3-0785-4fa0-913d-c24f55138a87\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428669 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shcv4\" (UniqueName: \"kubernetes.io/projected/b3422143-7872-4217-94e5-f1be044d7858-kube-api-access-shcv4\") pod \"b3422143-7872-4217-94e5-f1be044d7858\" (UID: \"b3422143-7872-4217-94e5-f1be044d7858\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428697 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jp85m\" (UniqueName: \"kubernetes.io/projected/f2064ce3-0785-4fa0-913d-c24f55138a87-kube-api-access-jp85m\") pod \"f2064ce3-0785-4fa0-913d-c24f55138a87\" (UID: \"f2064ce3-0785-4fa0-913d-c24f55138a87\") " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.428843 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3422143-7872-4217-94e5-f1be044d7858-logs" (OuterVolumeSpecName: "logs") pod "b3422143-7872-4217-94e5-f1be044d7858" (UID: "b3422143-7872-4217-94e5-f1be044d7858"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.429049 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3422143-7872-4217-94e5-f1be044d7858-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.429690 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3422143-7872-4217-94e5-f1be044d7858-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b3422143-7872-4217-94e5-f1be044d7858" (UID: "b3422143-7872-4217-94e5-f1be044d7858"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.433319 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3422143-7872-4217-94e5-f1be044d7858-kube-api-access-shcv4" (OuterVolumeSpecName: "kube-api-access-shcv4") pod "b3422143-7872-4217-94e5-f1be044d7858" (UID: "b3422143-7872-4217-94e5-f1be044d7858"). InnerVolumeSpecName "kube-api-access-shcv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.434044 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2064ce3-0785-4fa0-913d-c24f55138a87-kube-api-access-jp85m" (OuterVolumeSpecName: "kube-api-access-jp85m") pod "f2064ce3-0785-4fa0-913d-c24f55138a87" (UID: "f2064ce3-0785-4fa0-913d-c24f55138a87"). InnerVolumeSpecName "kube-api-access-jp85m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.439429 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-scripts" (OuterVolumeSpecName: "scripts") pod "b3422143-7872-4217-94e5-f1be044d7858" (UID: "b3422143-7872-4217-94e5-f1be044d7858"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.446551 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "b3422143-7872-4217-94e5-f1be044d7858" (UID: "b3422143-7872-4217-94e5-f1be044d7858"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.454457 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3422143-7872-4217-94e5-f1be044d7858" (UID: "b3422143-7872-4217-94e5-f1be044d7858"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.475211 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-config-data" (OuterVolumeSpecName: "config-data") pod "b3422143-7872-4217-94e5-f1be044d7858" (UID: "b3422143-7872-4217-94e5-f1be044d7858"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.478932 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f2064ce3-0785-4fa0-913d-c24f55138a87" (UID: "f2064ce3-0785-4fa0-913d-c24f55138a87"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.481167 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f2064ce3-0785-4fa0-913d-c24f55138a87" (UID: "f2064ce3-0785-4fa0-913d-c24f55138a87"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.483064 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b3422143-7872-4217-94e5-f1be044d7858" (UID: "b3422143-7872-4217-94e5-f1be044d7858"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.492970 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-config" (OuterVolumeSpecName: "config") pod "f2064ce3-0785-4fa0-913d-c24f55138a87" (UID: "f2064ce3-0785-4fa0-913d-c24f55138a87"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.495077 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f2064ce3-0785-4fa0-913d-c24f55138a87" (UID: "f2064ce3-0785-4fa0-913d-c24f55138a87"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.531657 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.531701 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.531715 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.531728 4941 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.531739 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3422143-7872-4217-94e5-f1be044d7858-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.531749 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.531759 4941 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b3422143-7872-4217-94e5-f1be044d7858-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.531769 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.531779 4941 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-shcv4\" (UniqueName: \"kubernetes.io/projected/b3422143-7872-4217-94e5-f1be044d7858-kube-api-access-shcv4\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.531794 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jp85m\" (UniqueName: \"kubernetes.io/projected/f2064ce3-0785-4fa0-913d-c24f55138a87-kube-api-access-jp85m\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.531804 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2064ce3-0785-4fa0-913d-c24f55138a87-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.531857 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.553908 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.633490 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.799466 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b3422143-7872-4217-94e5-f1be044d7858","Type":"ContainerDied","Data":"19fc60a826246e65dd97c3184f70ac78e9e48da5617a55bac05254854b9c9d85"} Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.799548 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.803031 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" event={"ID":"f2064ce3-0785-4fa0-913d-c24f55138a87","Type":"ContainerDied","Data":"ea6bf0336014c20654ca9d472bdad10e9bf6000e427a8f7d44092e24c37e7272"} Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.803090 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.867461 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.876883 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.885413 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f6d79597f-wg6p7"] Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.907365 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f6d79597f-wg6p7"] Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.907445 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:05:32 crc kubenswrapper[4941]: E1130 07:05:32.907766 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2064ce3-0785-4fa0-913d-c24f55138a87" containerName="dnsmasq-dns" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.907784 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2064ce3-0785-4fa0-913d-c24f55138a87" containerName="dnsmasq-dns" Nov 30 07:05:32 crc kubenswrapper[4941]: E1130 07:05:32.907796 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3422143-7872-4217-94e5-f1be044d7858" containerName="glance-httpd" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.907803 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3422143-7872-4217-94e5-f1be044d7858" containerName="glance-httpd" Nov 30 07:05:32 crc kubenswrapper[4941]: E1130 07:05:32.907820 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3422143-7872-4217-94e5-f1be044d7858" containerName="glance-log" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.907825 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3422143-7872-4217-94e5-f1be044d7858" containerName="glance-log" Nov 30 07:05:32 crc kubenswrapper[4941]: E1130 07:05:32.907852 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2064ce3-0785-4fa0-913d-c24f55138a87" containerName="init" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.907859 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2064ce3-0785-4fa0-913d-c24f55138a87" containerName="init" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.908036 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2064ce3-0785-4fa0-913d-c24f55138a87" containerName="dnsmasq-dns" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.908059 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3422143-7872-4217-94e5-f1be044d7858" containerName="glance-log" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.908073 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3422143-7872-4217-94e5-f1be044d7858" containerName="glance-httpd" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.908876 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.908961 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.928830 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.929065 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.940148 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.940188 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-config-data\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.942609 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eacf46e9-d6f3-4b22-9999-90bdb57164f9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.942691 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eacf46e9-d6f3-4b22-9999-90bdb57164f9-logs\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.942919 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.942983 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk4dt\" (UniqueName: \"kubernetes.io/projected/eacf46e9-d6f3-4b22-9999-90bdb57164f9-kube-api-access-gk4dt\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.943018 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:32 crc kubenswrapper[4941]: I1130 07:05:32.943193 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-scripts\") pod \"glance-default-external-api-0\" 
(UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.045946 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.046018 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-config-data\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.046062 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eacf46e9-d6f3-4b22-9999-90bdb57164f9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.046109 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eacf46e9-d6f3-4b22-9999-90bdb57164f9-logs\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.046196 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.046224 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk4dt\" (UniqueName: \"kubernetes.io/projected/eacf46e9-d6f3-4b22-9999-90bdb57164f9-kube-api-access-gk4dt\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.046268 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.046357 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-scripts\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.046653 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eacf46e9-d6f3-4b22-9999-90bdb57164f9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 
crc kubenswrapper[4941]: I1130 07:05:33.046831 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.047000 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eacf46e9-d6f3-4b22-9999-90bdb57164f9-logs\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.052343 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.052978 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.060468 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-config-data\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.064986 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-scripts\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.065478 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk4dt\" (UniqueName: \"kubernetes.io/projected/eacf46e9-d6f3-4b22-9999-90bdb57164f9-kube-api-access-gk4dt\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.103130 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.248762 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.531952 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3422143-7872-4217-94e5-f1be044d7858" path="/var/lib/kubelet/pods/b3422143-7872-4217-94e5-f1be044d7858/volumes" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.532924 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2064ce3-0785-4fa0-913d-c24f55138a87" path="/var/lib/kubelet/pods/f2064ce3-0785-4fa0-913d-c24f55138a87/volumes" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.819935 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wtqm6" event={"ID":"bb8e782b-25d0-452c-a19a-7e43765f4200","Type":"ContainerDied","Data":"e781b1c1b03ca0965534a4e5a2371be77dda84658271438895b09cfaf2f117a6"} Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.819980 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e781b1c1b03ca0965534a4e5a2371be77dda84658271438895b09cfaf2f117a6" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.824301 4941 scope.go:117] "RemoveContainer" containerID="e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39" Nov 30 07:05:33 crc kubenswrapper[4941]: E1130 07:05:33.836999 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39\": container with ID starting with e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39 not found: ID does not exist" containerID="e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.837063 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39"} err="failed to get container status \"e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39\": rpc error: code = NotFound desc = could not find container \"e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39\": container with ID starting with e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39 not found: ID does not exist" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.837096 4941 scope.go:117] "RemoveContainer" containerID="2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6" Nov 30 07:05:33 crc kubenswrapper[4941]: E1130 07:05:33.837731 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6\": container with ID starting with 2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6 not found: ID does not exist" containerID="2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.837776 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6"} err="failed to get container status \"2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6\": rpc error: code = NotFound desc = could not find container \"2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6\": container with ID starting with 2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6 not found: ID 
does not exist" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.837803 4941 scope.go:117] "RemoveContainer" containerID="e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.838867 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39"} err="failed to get container status \"e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39\": rpc error: code = NotFound desc = could not find container \"e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39\": container with ID starting with e9854313865db3fe65f1ef516890b15fd5cbaeb9d32f611262e00329930cec39 not found: ID does not exist" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.838900 4941 scope.go:117] "RemoveContainer" containerID="2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.839743 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6"} err="failed to get container status \"2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6\": rpc error: code = NotFound desc = could not find container \"2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6\": container with ID starting with 2e54e745a560cf0375ff7bf26694f77a61880867941e809cfa65234e6b75d5b6 not found: ID does not exist" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.839790 4941 scope.go:117] "RemoveContainer" containerID="f1537bff15cdfd93022aff7232d986b90b1516b9cdd37a6578cb802e96242d55" Nov 30 07:05:33 crc kubenswrapper[4941]: E1130 07:05:33.846857 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b5266c9a26766fce2b92f95dff52d362a760f7baf1474cdcb33bd68570e096c0" Nov 30 07:05:33 crc kubenswrapper[4941]: E1130 07:05:33.848181 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b5266c9a26766fce2b92f95dff52d362a760f7baf1474cdcb33bd68570e096c0,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s4thd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-5zxfl_openstack(2ee0724a-81f2-4b40-959f-e831f4d9abf5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 30 07:05:33 crc kubenswrapper[4941]: E1130 07:05:33.849465 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-5zxfl" podUID="2ee0724a-81f2-4b40-959f-e831f4d9abf5" Nov 30 07:05:33 crc kubenswrapper[4941]: I1130 07:05:33.990193 4941 scope.go:117] "RemoveContainer" containerID="d17b55a5118094b06219a0a789efc9065c20e6751b586a64858bac3a43221e13" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.049034 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.076644 4941 scope.go:117] "RemoveContainer" containerID="cfa64cfa6ce7ea072fbcdd1db81afa8d60832fe8d0ea99c2a0036c7e0e231ce5" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.087427 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8th8\" (UniqueName: \"kubernetes.io/projected/bb8e782b-25d0-452c-a19a-7e43765f4200-kube-api-access-c8th8\") pod \"bb8e782b-25d0-452c-a19a-7e43765f4200\" (UID: \"bb8e782b-25d0-452c-a19a-7e43765f4200\") " Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.091788 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb8e782b-25d0-452c-a19a-7e43765f4200-kube-api-access-c8th8" (OuterVolumeSpecName: "kube-api-access-c8th8") pod "bb8e782b-25d0-452c-a19a-7e43765f4200" (UID: "bb8e782b-25d0-452c-a19a-7e43765f4200"). InnerVolumeSpecName "kube-api-access-c8th8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.096684 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb8e782b-25d0-452c-a19a-7e43765f4200-combined-ca-bundle\") pod \"bb8e782b-25d0-452c-a19a-7e43765f4200\" (UID: \"bb8e782b-25d0-452c-a19a-7e43765f4200\") " Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.096859 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bb8e782b-25d0-452c-a19a-7e43765f4200-config\") pod \"bb8e782b-25d0-452c-a19a-7e43765f4200\" (UID: \"bb8e782b-25d0-452c-a19a-7e43765f4200\") " Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.098841 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8th8\" (UniqueName: \"kubernetes.io/projected/bb8e782b-25d0-452c-a19a-7e43765f4200-kube-api-access-c8th8\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.121415 4941 scope.go:117] "RemoveContainer" containerID="72615f93172b5338c28e3b495d4060d37ace32a7acd2283df14a81f08bdd28ed" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.194763 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb8e782b-25d0-452c-a19a-7e43765f4200-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bb8e782b-25d0-452c-a19a-7e43765f4200" (UID: "bb8e782b-25d0-452c-a19a-7e43765f4200"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.198628 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb8e782b-25d0-452c-a19a-7e43765f4200-config" (OuterVolumeSpecName: "config") pod "bb8e782b-25d0-452c-a19a-7e43765f4200" (UID: "bb8e782b-25d0-452c-a19a-7e43765f4200"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.202376 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb8e782b-25d0-452c-a19a-7e43765f4200-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.202417 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/bb8e782b-25d0-452c-a19a-7e43765f4200-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.231136 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f6d79597f-wg6p7" podUID="f2064ce3-0785-4fa0-913d-c24f55138a87" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: i/o timeout" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.527315 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-wj8qz"] Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.540876 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.640088 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:05:34 crc kubenswrapper[4941]: W1130 07:05:34.658516 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeacf46e9_d6f3_4b22_9999_90bdb57164f9.slice/crio-73630b9a9dd461bad2294aa74e41302faf73b4ef1223963181fe1a87afd212d7 WatchSource:0}: Error finding container 73630b9a9dd461bad2294aa74e41302faf73b4ef1223963181fe1a87afd212d7: Status 404 returned error can't find the container with id 73630b9a9dd461bad2294aa74e41302faf73b4ef1223963181fe1a87afd212d7 Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.833362 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eacf46e9-d6f3-4b22-9999-90bdb57164f9","Type":"ContainerStarted","Data":"73630b9a9dd461bad2294aa74e41302faf73b4ef1223963181fe1a87afd212d7"} Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.837680 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-hbkr7" event={"ID":"d4e9c60f-d375-4332-ab45-93a3e96be457","Type":"ContainerStarted","Data":"466cbbd6f33788f86483c8f7ea35221c9274dc72718366bea0cfb6536d39f3c0"} Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.840571 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-wj8qz" event={"ID":"727089d5-851d-4e5e-80ed-f09e770f1fe2","Type":"ContainerStarted","Data":"7e60a0a05344af48ff485b4a07f2c2c2df39e813faeed0cd0ad52d95c70eb25d"} Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.840617 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-wj8qz" event={"ID":"727089d5-851d-4e5e-80ed-f09e770f1fe2","Type":"ContainerStarted","Data":"90109f7cbd5697860d1a84773995a77af4f12251376a23c3af2abf2e518597a3"} Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.845883 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jlmlm" event={"ID":"414c3399-b5e5-4e28-a1ab-d646fa3193fe","Type":"ContainerStarted","Data":"36b2bd1a497fffdb80bcceec0ebd1e8f6f365d640568c939adc2448b434c62b9"} Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.848525 4941 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-wtqm6" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.849043 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ea693ac4-4f2c-42fa-b59e-080e1f72ff77","Type":"ContainerStarted","Data":"024a93520c60e9fc7c564ed20dfccb1736f905931407c4ca073bd583b862c3b0"} Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.858688 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-hbkr7" podStartSLOduration=3.396866553 podStartE2EDuration="26.858673611s" podCreationTimestamp="2025-11-30 07:05:08 +0000 UTC" firstStartedPulling="2025-11-30 07:05:10.324335666 +0000 UTC m=+1131.092507275" lastFinishedPulling="2025-11-30 07:05:33.786142724 +0000 UTC m=+1154.554314333" observedRunningTime="2025-11-30 07:05:34.855592535 +0000 UTC m=+1155.623764144" watchObservedRunningTime="2025-11-30 07:05:34.858673611 +0000 UTC m=+1155.626845220" Nov 30 07:05:34 crc kubenswrapper[4941]: E1130 07:05:34.867772 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b5266c9a26766fce2b92f95dff52d362a760f7baf1474cdcb33bd68570e096c0\\\"\"" pod="openstack/cinder-db-sync-5zxfl" podUID="2ee0724a-81f2-4b40-959f-e831f4d9abf5" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.877499 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-jlmlm" podStartSLOduration=4.974978306 podStartE2EDuration="26.877480887s" podCreationTimestamp="2025-11-30 07:05:08 +0000 UTC" firstStartedPulling="2025-11-30 07:05:10.31706772 +0000 UTC m=+1131.085239329" lastFinishedPulling="2025-11-30 07:05:32.219570281 +0000 UTC m=+1152.987741910" observedRunningTime="2025-11-30 07:05:34.872586254 +0000 UTC m=+1155.640757863" watchObservedRunningTime="2025-11-30 07:05:34.877480887 +0000 UTC m=+1155.645652496" Nov 30 07:05:34 crc kubenswrapper[4941]: I1130 07:05:34.912171 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-wj8qz" podStartSLOduration=10.912155557 podStartE2EDuration="10.912155557s" podCreationTimestamp="2025-11-30 07:05:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:34.904917412 +0000 UTC m=+1155.673089021" watchObservedRunningTime="2025-11-30 07:05:34.912155557 +0000 UTC m=+1155.680327166" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.208877 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f677dd449-r6lks"] Nov 30 07:05:35 crc kubenswrapper[4941]: E1130 07:05:35.209849 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb8e782b-25d0-452c-a19a-7e43765f4200" containerName="neutron-db-sync" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.209898 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb8e782b-25d0-452c-a19a-7e43765f4200" containerName="neutron-db-sync" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.210226 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb8e782b-25d0-452c-a19a-7e43765f4200" containerName="neutron-db-sync" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.216285 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.230186 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f677dd449-r6lks"] Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.305028 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-d79c95d68-mszfh"] Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.312015 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.318021 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.318229 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-vd4td" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.318414 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.318554 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.338972 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d79c95d68-mszfh"] Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.352161 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-ovsdbserver-sb\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.352214 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-config\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.352232 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-dns-svc\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.352266 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-ovsdbserver-nb\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.352284 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7r2m\" (UniqueName: \"kubernetes.io/projected/356ab68a-ffce-4827-b044-0bce069c508a-kube-api-access-w7r2m\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.352308 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-dns-swift-storage-0\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.453854 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf82l\" (UniqueName: \"kubernetes.io/projected/14e3ee9e-71a8-4ebe-843a-a9b875995aea-kube-api-access-xf82l\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.453911 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-ovsdbserver-sb\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.453949 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-config\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.453969 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-dns-svc\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.454017 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-ovsdbserver-nb\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.454043 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-config\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.454069 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7r2m\" (UniqueName: \"kubernetes.io/projected/356ab68a-ffce-4827-b044-0bce069c508a-kube-api-access-w7r2m\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.454092 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-httpd-config\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.454125 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-dns-swift-storage-0\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.454154 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-combined-ca-bundle\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.454454 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-ovndb-tls-certs\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.455057 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-config\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.456839 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-ovsdbserver-sb\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.457025 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-dns-svc\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.457103 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-dns-swift-storage-0\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.457544 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-ovsdbserver-nb\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.477927 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7r2m\" (UniqueName: \"kubernetes.io/projected/356ab68a-ffce-4827-b044-0bce069c508a-kube-api-access-w7r2m\") pod \"dnsmasq-dns-7f677dd449-r6lks\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.558145 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-combined-ca-bundle\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.558322 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-ovndb-tls-certs\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.559215 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf82l\" (UniqueName: \"kubernetes.io/projected/14e3ee9e-71a8-4ebe-843a-a9b875995aea-kube-api-access-xf82l\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.559672 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-config\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.559718 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-httpd-config\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.619997 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-combined-ca-bundle\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.624595 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-httpd-config\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.627271 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-config\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.630375 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf82l\" (UniqueName: \"kubernetes.io/projected/14e3ee9e-71a8-4ebe-843a-a9b875995aea-kube-api-access-xf82l\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.631277 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-ovndb-tls-certs\") pod \"neutron-d79c95d68-mszfh\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " 
pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.632089 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.722457 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.886240 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20e82100-3d40-4b16-be58-15e74c1aee65","Type":"ContainerStarted","Data":"896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea"} Nov 30 07:05:35 crc kubenswrapper[4941]: I1130 07:05:35.896725 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ea693ac4-4f2c-42fa-b59e-080e1f72ff77","Type":"ContainerStarted","Data":"54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f"} Nov 30 07:05:36 crc kubenswrapper[4941]: I1130 07:05:36.239261 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f677dd449-r6lks"] Nov 30 07:05:36 crc kubenswrapper[4941]: I1130 07:05:36.619921 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-d79c95d68-mszfh"] Nov 30 07:05:36 crc kubenswrapper[4941]: W1130 07:05:36.645731 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14e3ee9e_71a8_4ebe_843a_a9b875995aea.slice/crio-b036ab08e61a7b3b38949fe7f60f1b7c6b71faee28523b50daa9365a68d18bb5 WatchSource:0}: Error finding container b036ab08e61a7b3b38949fe7f60f1b7c6b71faee28523b50daa9365a68d18bb5: Status 404 returned error can't find the container with id b036ab08e61a7b3b38949fe7f60f1b7c6b71faee28523b50daa9365a68d18bb5 Nov 30 07:05:36 crc kubenswrapper[4941]: I1130 07:05:36.910691 4941 generic.go:334] "Generic (PLEG): container finished" podID="356ab68a-ffce-4827-b044-0bce069c508a" containerID="0be944b2611c455b256e4373bd28d21687a94e0fd2f3d455292e254a865c9a79" exitCode=0 Nov 30 07:05:36 crc kubenswrapper[4941]: I1130 07:05:36.912509 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" event={"ID":"356ab68a-ffce-4827-b044-0bce069c508a","Type":"ContainerDied","Data":"0be944b2611c455b256e4373bd28d21687a94e0fd2f3d455292e254a865c9a79"} Nov 30 07:05:36 crc kubenswrapper[4941]: I1130 07:05:36.912556 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" event={"ID":"356ab68a-ffce-4827-b044-0bce069c508a","Type":"ContainerStarted","Data":"b352468f9cddbbbf6f1657032e643be1452f15dfb1b21f9f0817f598eab9a62c"} Nov 30 07:05:36 crc kubenswrapper[4941]: I1130 07:05:36.918632 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eacf46e9-d6f3-4b22-9999-90bdb57164f9","Type":"ContainerStarted","Data":"ebbf634be703d8b94988f93bd497dfce040493da4962912a2d0670776bf39096"} Nov 30 07:05:36 crc kubenswrapper[4941]: I1130 07:05:36.918701 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eacf46e9-d6f3-4b22-9999-90bdb57164f9","Type":"ContainerStarted","Data":"52864eecab672111de43913d9eb689e210590535ffe9e8b70c63d3d9ca0ae2b3"} Nov 30 07:05:36 crc kubenswrapper[4941]: I1130 07:05:36.922244 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d79c95d68-mszfh" 
event={"ID":"14e3ee9e-71a8-4ebe-843a-a9b875995aea","Type":"ContainerStarted","Data":"0161e266452b785a55d6813c53f14f9ef6eec721c43285425bffea10ab4abfaa"} Nov 30 07:05:36 crc kubenswrapper[4941]: I1130 07:05:36.922450 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d79c95d68-mszfh" event={"ID":"14e3ee9e-71a8-4ebe-843a-a9b875995aea","Type":"ContainerStarted","Data":"b036ab08e61a7b3b38949fe7f60f1b7c6b71faee28523b50daa9365a68d18bb5"} Nov 30 07:05:36 crc kubenswrapper[4941]: I1130 07:05:36.944731 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ea693ac4-4f2c-42fa-b59e-080e1f72ff77","Type":"ContainerStarted","Data":"7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f"} Nov 30 07:05:37 crc kubenswrapper[4941]: I1130 07:05:37.006036 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.006010025 podStartE2EDuration="5.006010025s" podCreationTimestamp="2025-11-30 07:05:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:36.961978703 +0000 UTC m=+1157.730150312" watchObservedRunningTime="2025-11-30 07:05:37.006010025 +0000 UTC m=+1157.774181634" Nov 30 07:05:37 crc kubenswrapper[4941]: I1130 07:05:37.020423 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=23.020396043 podStartE2EDuration="23.020396043s" podCreationTimestamp="2025-11-30 07:05:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:36.996635003 +0000 UTC m=+1157.764806612" watchObservedRunningTime="2025-11-30 07:05:37.020396043 +0000 UTC m=+1157.788567652" Nov 30 07:05:37 crc kubenswrapper[4941]: I1130 07:05:37.967422 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d79c95d68-mszfh" event={"ID":"14e3ee9e-71a8-4ebe-843a-a9b875995aea","Type":"ContainerStarted","Data":"d3c6082ab51d39ca0408b622f5558557161f66113397c83e314cb554460603eb"} Nov 30 07:05:37 crc kubenswrapper[4941]: I1130 07:05:37.968558 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:05:37 crc kubenswrapper[4941]: I1130 07:05:37.975144 4941 generic.go:334] "Generic (PLEG): container finished" podID="414c3399-b5e5-4e28-a1ab-d646fa3193fe" containerID="36b2bd1a497fffdb80bcceec0ebd1e8f6f365d640568c939adc2448b434c62b9" exitCode=0 Nov 30 07:05:37 crc kubenswrapper[4941]: I1130 07:05:37.975241 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jlmlm" event={"ID":"414c3399-b5e5-4e28-a1ab-d646fa3193fe","Type":"ContainerDied","Data":"36b2bd1a497fffdb80bcceec0ebd1e8f6f365d640568c939adc2448b434c62b9"} Nov 30 07:05:37 crc kubenswrapper[4941]: I1130 07:05:37.978511 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" event={"ID":"356ab68a-ffce-4827-b044-0bce069c508a","Type":"ContainerStarted","Data":"dbb988c2b953070e9157a1f0b178a07271722205af19fcaf631da4c93d90b298"} Nov 30 07:05:37 crc kubenswrapper[4941]: I1130 07:05:37.979613 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.003407 4941 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-d79c95d68-mszfh" podStartSLOduration=3.00338747 podStartE2EDuration="3.00338747s" podCreationTimestamp="2025-11-30 07:05:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:37.993253774 +0000 UTC m=+1158.761425383" watchObservedRunningTime="2025-11-30 07:05:38.00338747 +0000 UTC m=+1158.771559079" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.051468 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" podStartSLOduration=3.051448698 podStartE2EDuration="3.051448698s" podCreationTimestamp="2025-11-30 07:05:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:38.043626024 +0000 UTC m=+1158.811797653" watchObservedRunningTime="2025-11-30 07:05:38.051448698 +0000 UTC m=+1158.819620307" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.461307 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5678756fc7-642xv"] Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.463149 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.465679 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.471208 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.479769 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5678756fc7-642xv"] Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.642900 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g87xh\" (UniqueName: \"kubernetes.io/projected/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-kube-api-access-g87xh\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.642959 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-combined-ca-bundle\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.642975 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-internal-tls-certs\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.643008 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-ovndb-tls-certs\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc 
kubenswrapper[4941]: I1130 07:05:38.643084 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-public-tls-certs\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.643135 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-httpd-config\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.643161 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-config\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.745958 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-public-tls-certs\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.746083 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-httpd-config\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.746128 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-config\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.746200 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g87xh\" (UniqueName: \"kubernetes.io/projected/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-kube-api-access-g87xh\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.746231 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-combined-ca-bundle\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.746261 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-internal-tls-certs\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.746303 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-ovndb-tls-certs\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.754281 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-ovndb-tls-certs\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.756049 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-internal-tls-certs\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.768357 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-public-tls-certs\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.769198 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-httpd-config\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.769584 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-combined-ca-bundle\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.772028 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-config\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.772942 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g87xh\" (UniqueName: \"kubernetes.io/projected/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-kube-api-access-g87xh\") pod \"neutron-5678756fc7-642xv\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.785779 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.990584 4941 generic.go:334] "Generic (PLEG): container finished" podID="727089d5-851d-4e5e-80ed-f09e770f1fe2" containerID="7e60a0a05344af48ff485b4a07f2c2c2df39e813faeed0cd0ad52d95c70eb25d" exitCode=0 Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.990655 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-wj8qz" event={"ID":"727089d5-851d-4e5e-80ed-f09e770f1fe2","Type":"ContainerDied","Data":"7e60a0a05344af48ff485b4a07f2c2c2df39e813faeed0cd0ad52d95c70eb25d"} Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.992203 4941 generic.go:334] "Generic (PLEG): container finished" podID="d4e9c60f-d375-4332-ab45-93a3e96be457" containerID="466cbbd6f33788f86483c8f7ea35221c9274dc72718366bea0cfb6536d39f3c0" exitCode=0 Nov 30 07:05:38 crc kubenswrapper[4941]: I1130 07:05:38.992317 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-hbkr7" event={"ID":"d4e9c60f-d375-4332-ab45-93a3e96be457","Type":"ContainerDied","Data":"466cbbd6f33788f86483c8f7ea35221c9274dc72718366bea0cfb6536d39f3c0"} Nov 30 07:05:43 crc kubenswrapper[4941]: I1130 07:05:43.249173 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 30 07:05:43 crc kubenswrapper[4941]: I1130 07:05:43.249836 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 30 07:05:43 crc kubenswrapper[4941]: I1130 07:05:43.295511 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 30 07:05:43 crc kubenswrapper[4941]: I1130 07:05:43.317935 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.041165 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.041208 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.734554 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.743397 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.769145 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.780674 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-scripts\") pod \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.780748 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-scripts\") pod \"727089d5-851d-4e5e-80ed-f09e770f1fe2\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.780782 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcxj4\" (UniqueName: \"kubernetes.io/projected/414c3399-b5e5-4e28-a1ab-d646fa3193fe-kube-api-access-jcxj4\") pod \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.780809 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-fernet-keys\") pod \"727089d5-851d-4e5e-80ed-f09e770f1fe2\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.780867 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-combined-ca-bundle\") pod \"727089d5-851d-4e5e-80ed-f09e770f1fe2\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.780901 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-config-data\") pod \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.780953 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/414c3399-b5e5-4e28-a1ab-d646fa3193fe-logs\") pod \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.781017 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-combined-ca-bundle\") pod \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.781152 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sm5bp\" (UniqueName: \"kubernetes.io/projected/727089d5-851d-4e5e-80ed-f09e770f1fe2-kube-api-access-sm5bp\") pod \"727089d5-851d-4e5e-80ed-f09e770f1fe2\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.781179 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-config-data\") pod \"727089d5-851d-4e5e-80ed-f09e770f1fe2\" (UID: 
\"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.781276 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-credential-keys\") pod \"727089d5-851d-4e5e-80ed-f09e770f1fe2\" (UID: \"727089d5-851d-4e5e-80ed-f09e770f1fe2\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.787089 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/414c3399-b5e5-4e28-a1ab-d646fa3193fe-logs" (OuterVolumeSpecName: "logs") pod "414c3399-b5e5-4e28-a1ab-d646fa3193fe" (UID: "414c3399-b5e5-4e28-a1ab-d646fa3193fe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.789178 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-scripts" (OuterVolumeSpecName: "scripts") pod "727089d5-851d-4e5e-80ed-f09e770f1fe2" (UID: "727089d5-851d-4e5e-80ed-f09e770f1fe2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.795681 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "727089d5-851d-4e5e-80ed-f09e770f1fe2" (UID: "727089d5-851d-4e5e-80ed-f09e770f1fe2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.802889 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/414c3399-b5e5-4e28-a1ab-d646fa3193fe-kube-api-access-jcxj4" (OuterVolumeSpecName: "kube-api-access-jcxj4") pod "414c3399-b5e5-4e28-a1ab-d646fa3193fe" (UID: "414c3399-b5e5-4e28-a1ab-d646fa3193fe"). InnerVolumeSpecName "kube-api-access-jcxj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.805438 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-scripts" (OuterVolumeSpecName: "scripts") pod "414c3399-b5e5-4e28-a1ab-d646fa3193fe" (UID: "414c3399-b5e5-4e28-a1ab-d646fa3193fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.807945 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/727089d5-851d-4e5e-80ed-f09e770f1fe2-kube-api-access-sm5bp" (OuterVolumeSpecName: "kube-api-access-sm5bp") pod "727089d5-851d-4e5e-80ed-f09e770f1fe2" (UID: "727089d5-851d-4e5e-80ed-f09e770f1fe2"). InnerVolumeSpecName "kube-api-access-sm5bp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.808387 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "727089d5-851d-4e5e-80ed-f09e770f1fe2" (UID: "727089d5-851d-4e5e-80ed-f09e770f1fe2"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.876248 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-config-data" (OuterVolumeSpecName: "config-data") pod "727089d5-851d-4e5e-80ed-f09e770f1fe2" (UID: "727089d5-851d-4e5e-80ed-f09e770f1fe2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.880948 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "727089d5-851d-4e5e-80ed-f09e770f1fe2" (UID: "727089d5-851d-4e5e-80ed-f09e770f1fe2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.881438 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "414c3399-b5e5-4e28-a1ab-d646fa3193fe" (UID: "414c3399-b5e5-4e28-a1ab-d646fa3193fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.883222 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-config-data" (OuterVolumeSpecName: "config-data") pod "414c3399-b5e5-4e28-a1ab-d646fa3193fe" (UID: "414c3399-b5e5-4e28-a1ab-d646fa3193fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.884894 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fp8x\" (UniqueName: \"kubernetes.io/projected/d4e9c60f-d375-4332-ab45-93a3e96be457-kube-api-access-8fp8x\") pod \"d4e9c60f-d375-4332-ab45-93a3e96be457\" (UID: \"d4e9c60f-d375-4332-ab45-93a3e96be457\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885012 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-config-data\") pod \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\" (UID: \"414c3399-b5e5-4e28-a1ab-d646fa3193fe\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885033 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e9c60f-d375-4332-ab45-93a3e96be457-combined-ca-bundle\") pod \"d4e9c60f-d375-4332-ab45-93a3e96be457\" (UID: \"d4e9c60f-d375-4332-ab45-93a3e96be457\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885067 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d4e9c60f-d375-4332-ab45-93a3e96be457-db-sync-config-data\") pod \"d4e9c60f-d375-4332-ab45-93a3e96be457\" (UID: \"d4e9c60f-d375-4332-ab45-93a3e96be457\") " Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885616 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/414c3399-b5e5-4e28-a1ab-d646fa3193fe-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885634 4941 
reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885648 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885659 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sm5bp\" (UniqueName: \"kubernetes.io/projected/727089d5-851d-4e5e-80ed-f09e770f1fe2-kube-api-access-sm5bp\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885668 4941 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885677 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885688 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885697 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcxj4\" (UniqueName: \"kubernetes.io/projected/414c3399-b5e5-4e28-a1ab-d646fa3193fe-kube-api-access-jcxj4\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885706 4941 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.885715 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/727089d5-851d-4e5e-80ed-f09e770f1fe2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: W1130 07:05:44.886619 4941 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/414c3399-b5e5-4e28-a1ab-d646fa3193fe/volumes/kubernetes.io~secret/config-data Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.887385 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-config-data" (OuterVolumeSpecName: "config-data") pod "414c3399-b5e5-4e28-a1ab-d646fa3193fe" (UID: "414c3399-b5e5-4e28-a1ab-d646fa3193fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.891311 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4e9c60f-d375-4332-ab45-93a3e96be457-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d4e9c60f-d375-4332-ab45-93a3e96be457" (UID: "d4e9c60f-d375-4332-ab45-93a3e96be457"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.892375 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4e9c60f-d375-4332-ab45-93a3e96be457-kube-api-access-8fp8x" (OuterVolumeSpecName: "kube-api-access-8fp8x") pod "d4e9c60f-d375-4332-ab45-93a3e96be457" (UID: "d4e9c60f-d375-4332-ab45-93a3e96be457"). InnerVolumeSpecName "kube-api-access-8fp8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.927546 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4e9c60f-d375-4332-ab45-93a3e96be457-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4e9c60f-d375-4332-ab45-93a3e96be457" (UID: "d4e9c60f-d375-4332-ab45-93a3e96be457"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.983349 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.983402 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.983413 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.983423 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.987794 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fp8x\" (UniqueName: \"kubernetes.io/projected/d4e9c60f-d375-4332-ab45-93a3e96be457-kube-api-access-8fp8x\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.987822 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/414c3399-b5e5-4e28-a1ab-d646fa3193fe-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.987834 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e9c60f-d375-4332-ab45-93a3e96be457-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:44 crc kubenswrapper[4941]: I1130 07:05:44.987844 4941 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d4e9c60f-d375-4332-ab45-93a3e96be457-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.031564 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.033394 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.051028 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-wj8qz" event={"ID":"727089d5-851d-4e5e-80ed-f09e770f1fe2","Type":"ContainerDied","Data":"90109f7cbd5697860d1a84773995a77af4f12251376a23c3af2abf2e518597a3"} Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.051079 4941 pod_container_deletor.go:80] 
"Container not found in pod's containers" containerID="90109f7cbd5697860d1a84773995a77af4f12251376a23c3af2abf2e518597a3" Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.051081 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-wj8qz" Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.054125 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20e82100-3d40-4b16-be58-15e74c1aee65","Type":"ContainerStarted","Data":"e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7"} Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.056094 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jlmlm" Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.058098 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jlmlm" event={"ID":"414c3399-b5e5-4e28-a1ab-d646fa3193fe","Type":"ContainerDied","Data":"f4969fc490158f196b79a4ce075438ed0887a20c730a74dc3de3f61df42f2df2"} Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.058151 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4969fc490158f196b79a4ce075438ed0887a20c730a74dc3de3f61df42f2df2" Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.065671 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-hbkr7" event={"ID":"d4e9c60f-d375-4332-ab45-93a3e96be457","Type":"ContainerDied","Data":"a5009db6ed155c27a228030cd71eb40fdd62416f5c5fe944c6de784d3329c053"} Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.065703 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5009db6ed155c27a228030cd71eb40fdd62416f5c5fe944c6de784d3329c053" Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.066389 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-hbkr7" Nov 30 07:05:45 crc kubenswrapper[4941]: W1130 07:05:45.114699 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9dfbf8e6_60f7_47a0_9fee_3d532daf0503.slice/crio-334e9f4946147d9c94c287f3fb1a4305db1153961bed62868fbe365ca4ca7591 WatchSource:0}: Error finding container 334e9f4946147d9c94c287f3fb1a4305db1153961bed62868fbe365ca4ca7591: Status 404 returned error can't find the container with id 334e9f4946147d9c94c287f3fb1a4305db1153961bed62868fbe365ca4ca7591 Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.115580 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5678756fc7-642xv"] Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.635642 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.705151 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f645789c-rtfz6"] Nov 30 07:05:45 crc kubenswrapper[4941]: I1130 07:05:45.705499 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f645789c-rtfz6" podUID="d71115c3-7782-4396-8e3a-37cdacdb7802" containerName="dnsmasq-dns" containerID="cri-o://267ed5a6dd6ec77d04a9c5ce03a3cd0327ea124fdba4126b565c34b34b0d6014" gracePeriod=10 Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.004910 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-79cf87bd4d-c9dvr"] Nov 30 07:05:46 crc kubenswrapper[4941]: E1130 07:05:46.007912 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="414c3399-b5e5-4e28-a1ab-d646fa3193fe" containerName="placement-db-sync" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.008009 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="414c3399-b5e5-4e28-a1ab-d646fa3193fe" containerName="placement-db-sync" Nov 30 07:05:46 crc kubenswrapper[4941]: E1130 07:05:46.008086 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="727089d5-851d-4e5e-80ed-f09e770f1fe2" containerName="keystone-bootstrap" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.008139 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="727089d5-851d-4e5e-80ed-f09e770f1fe2" containerName="keystone-bootstrap" Nov 30 07:05:46 crc kubenswrapper[4941]: E1130 07:05:46.008217 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4e9c60f-d375-4332-ab45-93a3e96be457" containerName="barbican-db-sync" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.008271 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4e9c60f-d375-4332-ab45-93a3e96be457" containerName="barbican-db-sync" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.008579 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4e9c60f-d375-4332-ab45-93a3e96be457" containerName="barbican-db-sync" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.008677 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="727089d5-851d-4e5e-80ed-f09e770f1fe2" containerName="keystone-bootstrap" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.008741 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="414c3399-b5e5-4e28-a1ab-d646fa3193fe" containerName="placement-db-sync" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.009599 4941 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.015061 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.015188 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.015061 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.015446 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7c9965466b-7rmfq"] Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.046692 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.046960 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.048808 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ds5bd" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.051055 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-79cf87bd4d-c9dvr"] Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.051187 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.057364 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.057788 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.057924 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-pdqvq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.058063 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.058190 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.083129 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7c9965466b-7rmfq"] Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.124820 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz7mp\" (UniqueName: \"kubernetes.io/projected/25deaa20-8f61-4317-ad4a-11df9ddff2fe-kube-api-access-nz7mp\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125012 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-fernet-keys\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125061 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-scripts\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125132 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-config-data\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125204 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-combined-ca-bundle\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125235 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-internal-tls-certs\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125275 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-combined-ca-bundle\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125302 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-public-tls-certs\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125337 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-credential-keys\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125384 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a758cc8-4546-4982-b2a7-b7824ecfc118-logs\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125442 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-scripts\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125493 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-internal-tls-certs\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125527 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r4b8\" (UniqueName: \"kubernetes.io/projected/7a758cc8-4546-4982-b2a7-b7824ecfc118-kube-api-access-6r4b8\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125573 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-config-data\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.125594 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-public-tls-certs\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.152485 4941 generic.go:334] "Generic (PLEG): container finished" podID="d71115c3-7782-4396-8e3a-37cdacdb7802" containerID="267ed5a6dd6ec77d04a9c5ce03a3cd0327ea124fdba4126b565c34b34b0d6014" exitCode=0 Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.152581 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f645789c-rtfz6" event={"ID":"d71115c3-7782-4396-8e3a-37cdacdb7802","Type":"ContainerDied","Data":"267ed5a6dd6ec77d04a9c5ce03a3cd0327ea124fdba4126b565c34b34b0d6014"} Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.221414 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5678756fc7-642xv" event={"ID":"9dfbf8e6-60f7-47a0-9fee-3d532daf0503","Type":"ContainerStarted","Data":"2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc"} Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.221851 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5678756fc7-642xv" event={"ID":"9dfbf8e6-60f7-47a0-9fee-3d532daf0503","Type":"ContainerStarted","Data":"334e9f4946147d9c94c287f3fb1a4305db1153961bed62868fbe365ca4ca7591"} Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232323 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r4b8\" (UniqueName: \"kubernetes.io/projected/7a758cc8-4546-4982-b2a7-b7824ecfc118-kube-api-access-6r4b8\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232409 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-config-data\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232430 
4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-public-tls-certs\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232507 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz7mp\" (UniqueName: \"kubernetes.io/projected/25deaa20-8f61-4317-ad4a-11df9ddff2fe-kube-api-access-nz7mp\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232611 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-fernet-keys\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232652 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-scripts\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232715 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-config-data\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232777 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-combined-ca-bundle\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232808 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-internal-tls-certs\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232849 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-combined-ca-bundle\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232874 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-public-tls-certs\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232901 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" 
(UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-credential-keys\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232940 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a758cc8-4546-4982-b2a7-b7824ecfc118-logs\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.232989 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-scripts\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.233033 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-internal-tls-certs\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.239903 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-combined-ca-bundle\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.240812 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a758cc8-4546-4982-b2a7-b7824ecfc118-logs\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.297109 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-public-tls-certs\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.297605 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-scripts\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.298769 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-combined-ca-bundle\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.299281 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-credential-keys\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " 
pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.299670 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-internal-tls-certs\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.304802 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7987c5dbd6-p8ncc"] Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.315546 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.305366 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-public-tls-certs\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.312250 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-scripts\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.321484 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-config-data\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.339381 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7987c5dbd6-p8ncc"] Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.342049 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r4b8\" (UniqueName: \"kubernetes.io/projected/7a758cc8-4546-4982-b2a7-b7824ecfc118-kube-api-access-6r4b8\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.348521 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.348727 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-vm2xx" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.348846 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.364266 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-config-data\") pod \"placement-7c9965466b-7rmfq\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.364662 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-internal-tls-certs\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.365097 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-fernet-keys\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.365370 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-74d6754465-nglc5"] Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.366678 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-74d6754465-nglc5" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.368119 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz7mp\" (UniqueName: \"kubernetes.io/projected/25deaa20-8f61-4317-ad4a-11df9ddff2fe-kube-api-access-nz7mp\") pod \"keystone-79cf87bd4d-c9dvr\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.383621 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.393768 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.405956 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-74d6754465-nglc5"] Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.443276 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-config-data-custom\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.443370 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/126a91a7-8a81-40ef-87db-383ed37a26f4-logs\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.443399 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vthpx\" (UniqueName: \"kubernetes.io/projected/126a91a7-8a81-40ef-87db-383ed37a26f4-kube-api-access-vthpx\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.443505 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-combined-ca-bundle\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " 
pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.443533 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-config-data\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.478515 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-64dfd64c45-d492g"] Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.531640 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64dfd64c45-d492g" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.537942 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64dfd64c45-d492g"] Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.549835 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-config-data-custom\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.549910 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/126a91a7-8a81-40ef-87db-383ed37a26f4-logs\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.550182 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-config-data\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.550206 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-config-data-custom\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.550230 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vthpx\" (UniqueName: \"kubernetes.io/projected/126a91a7-8a81-40ef-87db-383ed37a26f4-kube-api-access-vthpx\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.555878 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/126a91a7-8a81-40ef-87db-383ed37a26f4-logs\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.550547 4941 
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.550547 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn6v2\" (UniqueName: \"kubernetes.io/projected/fb74a593-764a-416b-897b-539bafb29c70-kube-api-access-dn6v2\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.556821 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-combined-ca-bundle\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.557256 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-combined-ca-bundle\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.557306 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-config-data\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.557456 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb74a593-764a-416b-897b-539bafb29c70-logs\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.576986 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.579261 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.654115 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-79cf87bd4d-c9dvr"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.657886 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-config-data-custom\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.674967 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ztp9\" (UniqueName: \"kubernetes.io/projected/524a3388-a23b-4d06-a79c-56c5dfb3107d-kube-api-access-4ztp9\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.675028 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-sb\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.675072 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb74a593-764a-416b-897b-539bafb29c70-logs\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.675110 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-dns-swift-storage-0\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.675152 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-config\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.675207 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-nb\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.675290 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-config-data\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.675312 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-config-data-custom\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.675463 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn6v2\" (UniqueName: \"kubernetes.io/projected/fb74a593-764a-416b-897b-539bafb29c70-kube-api-access-dn6v2\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.675494 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-combined-ca-bundle\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.675533 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-dns-svc\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.676951 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb74a593-764a-416b-897b-539bafb29c70-logs\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.693357 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-combined-ca-bundle\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.694904 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-config-data\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.714851 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-config-data\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.723344 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vthpx\" (UniqueName: \"kubernetes.io/projected/126a91a7-8a81-40ef-87db-383ed37a26f4-kube-api-access-vthpx\") pod \"barbican-keystone-listener-7987c5dbd6-p8ncc\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.755359 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-combined-ca-bundle\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.757244 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-config-data-custom\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.768042 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn6v2\" (UniqueName: \"kubernetes.io/projected/fb74a593-764a-416b-897b-539bafb29c70-kube-api-access-dn6v2\") pod \"barbican-worker-74d6754465-nglc5\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " pod="openstack/barbican-worker-74d6754465-nglc5"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.779164 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-dns-svc\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.779365 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ztp9\" (UniqueName: \"kubernetes.io/projected/524a3388-a23b-4d06-a79c-56c5dfb3107d-kube-api-access-4ztp9\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.779546 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-sb\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.779669 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-dns-swift-storage-0\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.779772 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-config\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.779883 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-nb\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.781007 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-sb\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
(UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-sb\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.781639 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-dns-swift-storage-0\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.782001 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-nb\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.782774 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-dns-svc\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.783600 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-config\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.787059 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5f6b49b776-wzn74"] Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.795224 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.817480 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5f6b49b776-wzn74"] Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.818531 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.836619 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ztp9\" (UniqueName: \"kubernetes.io/projected/524a3388-a23b-4d06-a79c-56c5dfb3107d-kube-api-access-4ztp9\") pod \"dnsmasq-dns-64dfd64c45-d492g\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " pod="openstack/dnsmasq-dns-64dfd64c45-d492g" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.882037 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84e0357d-8ac0-467b-883e-04e54d50de54-logs\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.882683 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-combined-ca-bundle\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.882867 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-config-data-custom\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.882998 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxr7h\" (UniqueName: \"kubernetes.io/projected/84e0357d-8ac0-467b-883e-04e54d50de54-kube-api-access-dxr7h\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.883107 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-config-data\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.987922 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84e0357d-8ac0-467b-883e-04e54d50de54-logs\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.988358 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-combined-ca-bundle\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: 
\"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.988402 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-config-data-custom\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.988426 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxr7h\" (UniqueName: \"kubernetes.io/projected/84e0357d-8ac0-467b-883e-04e54d50de54-kube-api-access-dxr7h\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.988457 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-config-data\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:46 crc kubenswrapper[4941]: I1130 07:05:46.998349 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84e0357d-8ac0-467b-883e-04e54d50de54-logs\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.001372 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-config-data\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.001965 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-combined-ca-bundle\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.002207 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.007757 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-config-data-custom\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.008174 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-74d6754465-nglc5" Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.030387 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxr7h\" (UniqueName: \"kubernetes.io/projected/84e0357d-8ac0-467b-883e-04e54d50de54-kube-api-access-dxr7h\") pod \"barbican-api-5f6b49b776-wzn74\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.243146 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f645789c-rtfz6" event={"ID":"d71115c3-7782-4396-8e3a-37cdacdb7802","Type":"ContainerDied","Data":"fca3450d3c0b5974cbe676aae0dc687abf0419b76a97f62308c1da14fe079da6"} Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.243502 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fca3450d3c0b5974cbe676aae0dc687abf0419b76a97f62308c1da14fe079da6" Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.255352 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5678756fc7-642xv" event={"ID":"9dfbf8e6-60f7-47a0-9fee-3d532daf0503","Type":"ContainerStarted","Data":"539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575"} Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.255580 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.288787 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5678756fc7-642xv" podStartSLOduration=9.288770737 podStartE2EDuration="9.288770737s" podCreationTimestamp="2025-11-30 07:05:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:47.283187793 +0000 UTC m=+1168.051359402" watchObservedRunningTime="2025-11-30 07:05:47.288770737 +0000 UTC m=+1168.056942346" Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.371367 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64dfd64c45-d492g" Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.382171 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.412608 4941 util.go:48] "No ready sandbox for pod can be found. 
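[editor's note] The "SyncLoop (PLEG): event for pod" entries above carry a small JSON payload ({"ID":...,"Type":...,"Data":...}) recording container and sandbox lifecycle transitions (ContainerStarted, ContainerDied). A sketch that rebuilds a per-pod timeline from them, assuming one entry per line as restored here:

import json
import re
from collections import defaultdict

PLEG = re.compile(r'"SyncLoop \(PLEG\): event for pod" pod="(?P<pod>[^"]+)" event=(?P<event>\{[^}]*\})')

def pleg_timeline(lines):
    """Map each pod to its ordered (event type, container-or-sandbox ID) transitions."""
    timeline = defaultdict(list)
    for line in lines:
        m = PLEG.search(line)
        if m:
            ev = json.loads(m.group('event'))  # the event payload is valid JSON
            timeline[m.group('pod')].append((ev['Type'], ev['Data']))
    return timeline

For dnsmasq-dns-55f645789c-rtfz6 this yields a ContainerDied for its old sandbox just as the replacement dnsmasq-dns-64dfd64c45-d492g is being set up, which is the rollover the surrounding entries describe.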
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.412608 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f645789c-rtfz6"
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.519172 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zllb2\" (UniqueName: \"kubernetes.io/projected/d71115c3-7782-4396-8e3a-37cdacdb7802-kube-api-access-zllb2\") pod \"d71115c3-7782-4396-8e3a-37cdacdb7802\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") "
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.519768 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-dns-svc\") pod \"d71115c3-7782-4396-8e3a-37cdacdb7802\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") "
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.519798 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-ovsdbserver-nb\") pod \"d71115c3-7782-4396-8e3a-37cdacdb7802\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") "
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.520218 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-ovsdbserver-sb\") pod \"d71115c3-7782-4396-8e3a-37cdacdb7802\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") "
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.520268 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-config\") pod \"d71115c3-7782-4396-8e3a-37cdacdb7802\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") "
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.520313 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-dns-swift-storage-0\") pod \"d71115c3-7782-4396-8e3a-37cdacdb7802\" (UID: \"d71115c3-7782-4396-8e3a-37cdacdb7802\") "
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.544713 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d71115c3-7782-4396-8e3a-37cdacdb7802-kube-api-access-zllb2" (OuterVolumeSpecName: "kube-api-access-zllb2") pod "d71115c3-7782-4396-8e3a-37cdacdb7802" (UID: "d71115c3-7782-4396-8e3a-37cdacdb7802"). InnerVolumeSpecName "kube-api-access-zllb2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.610252 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d71115c3-7782-4396-8e3a-37cdacdb7802" (UID: "d71115c3-7782-4396-8e3a-37cdacdb7802"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.646255 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.646300 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zllb2\" (UniqueName: \"kubernetes.io/projected/d71115c3-7782-4396-8e3a-37cdacdb7802-kube-api-access-zllb2\") on node \"crc\" DevicePath \"\""
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.656930 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d71115c3-7782-4396-8e3a-37cdacdb7802" (UID: "d71115c3-7782-4396-8e3a-37cdacdb7802"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.674308 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d71115c3-7782-4396-8e3a-37cdacdb7802" (UID: "d71115c3-7782-4396-8e3a-37cdacdb7802"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.678392 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d71115c3-7782-4396-8e3a-37cdacdb7802" (UID: "d71115c3-7782-4396-8e3a-37cdacdb7802"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.679061 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-config" (OuterVolumeSpecName: "config") pod "d71115c3-7782-4396-8e3a-37cdacdb7802" (UID: "d71115c3-7782-4396-8e3a-37cdacdb7802"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.747707 4941 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.748077 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-dns-svc\") on node \"crc\" DevicePath \"\""
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.748089 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.748098 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d71115c3-7782-4396-8e3a-37cdacdb7802-config\") on node \"crc\" DevicePath \"\""
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.816778 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7c9965466b-7rmfq"]
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.910675 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.910814 4941 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Nov 30 07:05:47 crc kubenswrapper[4941]: I1130 07:05:47.924540 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.054438 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-79cf87bd4d-c9dvr"]
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.071699 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-74d6754465-nglc5"]
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.090254 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7987c5dbd6-p8ncc"]
Nov 30 07:05:48 crc kubenswrapper[4941]: W1130 07:05:48.104645 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25deaa20_8f61_4317_ad4a_11df9ddff2fe.slice/crio-08438617d7f95c8ba2fa018c7919936730de77fe645ce598be142c0420069cdc WatchSource:0}: Error finding container 08438617d7f95c8ba2fa018c7919936730de77fe645ce598be142c0420069cdc: Status 404 returned error can't find the container with id 08438617d7f95c8ba2fa018c7919936730de77fe645ce598be142c0420069cdc
Nov 30 07:05:48 crc kubenswrapper[4941]: W1130 07:05:48.106676 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb74a593_764a_416b_897b_539bafb29c70.slice/crio-1a4fa8f94dcc08ed93adee28c101d6d0115b4bf5bfaa85632ebf33655179ec1d WatchSource:0}: Error finding container 1a4fa8f94dcc08ed93adee28c101d6d0115b4bf5bfaa85632ebf33655179ec1d: Status 404 returned error can't find the container with id 1a4fa8f94dcc08ed93adee28c101d6d0115b4bf5bfaa85632ebf33655179ec1d
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.192164 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64dfd64c45-d492g"]
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.307936 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dfd64c45-d492g" event={"ID":"524a3388-a23b-4d06-a79c-56c5dfb3107d","Type":"ContainerStarted","Data":"7909fde426838449ed9fbc670a19d666bb48cf91056352c3481356266f23fdba"}
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.330796 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-79cf87bd4d-c9dvr" event={"ID":"25deaa20-8f61-4317-ad4a-11df9ddff2fe","Type":"ContainerStarted","Data":"08438617d7f95c8ba2fa018c7919936730de77fe645ce598be142c0420069cdc"}
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.353259 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-74d6754465-nglc5" event={"ID":"fb74a593-764a-416b-897b-539bafb29c70","Type":"ContainerStarted","Data":"1a4fa8f94dcc08ed93adee28c101d6d0115b4bf5bfaa85632ebf33655179ec1d"}
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.357975 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7c9965466b-7rmfq" event={"ID":"7a758cc8-4546-4982-b2a7-b7824ecfc118","Type":"ContainerStarted","Data":"066567c51ffcaa136d125b9b00af881dc67f8f5a4a55ced105e493837ac88c12"}
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.364498 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" event={"ID":"126a91a7-8a81-40ef-87db-383ed37a26f4","Type":"ContainerStarted","Data":"f20855645344da1913360ef50f8d99f2d5c39b4ee43b823bc1f5141acf25fbe5"}
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.364771 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f645789c-rtfz6"
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.419526 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5f6b49b776-wzn74"]
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.485456 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f645789c-rtfz6"]
Nov 30 07:05:48 crc kubenswrapper[4941]: I1130 07:05:48.498274 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f645789c-rtfz6"]
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.378690 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5zxfl" event={"ID":"2ee0724a-81f2-4b40-959f-e831f4d9abf5","Type":"ContainerStarted","Data":"9fbbb85ad30a83b7456fb44a31c2209264c5e153a674196f4ae8765e3a9deed8"}
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.396551 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7c9965466b-7rmfq" event={"ID":"7a758cc8-4546-4982-b2a7-b7824ecfc118","Type":"ContainerStarted","Data":"009377a60fa215ae8250c38bc50dca1c5bfb321e79ea6ee7dd44dd9fbdcf7b42"}
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.396598 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7c9965466b-7rmfq" event={"ID":"7a758cc8-4546-4982-b2a7-b7824ecfc118","Type":"ContainerStarted","Data":"539d89a15edc70dc4c19cc2280d7df494d363b3461a1f1f53b5b2c3c6f64de13"}
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.397002 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7c9965466b-7rmfq"
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.397141 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7c9965466b-7rmfq"
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.407515 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-5zxfl" podStartSLOduration=4.280567974 podStartE2EDuration="41.407497069s" podCreationTimestamp="2025-11-30 07:05:08 +0000 UTC" firstStartedPulling="2025-11-30 07:05:10.159730587 +0000 UTC m=+1130.927902196" lastFinishedPulling="2025-11-30 07:05:47.286659682 +0000 UTC m=+1168.054831291" observedRunningTime="2025-11-30 07:05:49.401184903 +0000 UTC m=+1170.169356512" watchObservedRunningTime="2025-11-30 07:05:49.407497069 +0000 UTC m=+1170.175668678"
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.413350 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6b49b776-wzn74" event={"ID":"84e0357d-8ac0-467b-883e-04e54d50de54","Type":"ContainerStarted","Data":"79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8"}
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.413415 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6b49b776-wzn74" event={"ID":"84e0357d-8ac0-467b-883e-04e54d50de54","Type":"ContainerStarted","Data":"10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382"}
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.413433 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6b49b776-wzn74" event={"ID":"84e0357d-8ac0-467b-883e-04e54d50de54","Type":"ContainerStarted","Data":"39d241e0029d6995a59bf609c05463595f0a9ecad00320860bf8ce32f895354d"}
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.413736 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5f6b49b776-wzn74"
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.413872 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5f6b49b776-wzn74"
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.438488 4941 generic.go:334] "Generic (PLEG): container finished" podID="524a3388-a23b-4d06-a79c-56c5dfb3107d" containerID="61ab52c2fc176084b5d837fa57c50b67b8e684f2764cf2ebffbaa4b2f2143bb2" exitCode=0
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.438561 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dfd64c45-d492g" event={"ID":"524a3388-a23b-4d06-a79c-56c5dfb3107d","Type":"ContainerDied","Data":"61ab52c2fc176084b5d837fa57c50b67b8e684f2764cf2ebffbaa4b2f2143bb2"}
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.478168 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-79cf87bd4d-c9dvr" event={"ID":"25deaa20-8f61-4317-ad4a-11df9ddff2fe","Type":"ContainerStarted","Data":"b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4"}
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.479079 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-79cf87bd4d-c9dvr"
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.486159 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7c9965466b-7rmfq" podStartSLOduration=4.48613248 podStartE2EDuration="4.48613248s" podCreationTimestamp="2025-11-30 07:05:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:49.459364386 +0000 UTC m=+1170.227535995" watchObservedRunningTime="2025-11-30 07:05:49.48613248 +0000 UTC m=+1170.254304089"
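[editor's note] The pod_startup_latency_tracker entries make the bookkeeping explicit: for cinder-db-sync-5zxfl, podStartE2EDuration is 41.4 s but podStartSLOduration is only 4.28 s, and the gap is exactly the image-pull window (lastFinishedPulling minus firstStartedPulling). For pods that pulled nothing (zero-value pull timestamps), the two durations coincide. Re-deriving the SLO figure from the monotonic m=+ offsets in that entry:

# Offsets (seconds since kubelet start, the m=+ values) from the cinder-db-sync-5zxfl entry above.
first_started_pulling = 1130.927902196   # firstStartedPulling m=+1130.927902196
last_finished_pulling = 1168.054831291   # lastFinishedPulling m=+1168.054831291
e2e                   = 41.407497069     # podStartE2EDuration

pull_window = last_finished_pulling - first_started_pulling   # ~37.127 s spent pulling images
slo = e2e - pull_window
print(round(slo, 9))   # 4.280567974, matching podStartSLOduration: E2E minus pull time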
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.567518 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d71115c3-7782-4396-8e3a-37cdacdb7802" path="/var/lib/kubelet/pods/d71115c3-7782-4396-8e3a-37cdacdb7802/volumes"
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.617732 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5f6b49b776-wzn74" podStartSLOduration=3.6177052400000003 podStartE2EDuration="3.61770524s" podCreationTimestamp="2025-11-30 07:05:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:49.596521051 +0000 UTC m=+1170.364692670" watchObservedRunningTime="2025-11-30 07:05:49.61770524 +0000 UTC m=+1170.385876849"
Nov 30 07:05:49 crc kubenswrapper[4941]: I1130 07:05:49.729094 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-79cf87bd4d-c9dvr" podStartSLOduration=4.729072632 podStartE2EDuration="4.729072632s" podCreationTimestamp="2025-11-30 07:05:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:49.727309326 +0000 UTC m=+1170.495480935" watchObservedRunningTime="2025-11-30 07:05:49.729072632 +0000 UTC m=+1170.497244241"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.289628 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6794dcdcd8-t9v24"]
Nov 30 07:05:50 crc kubenswrapper[4941]: E1130 07:05:50.297595 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d71115c3-7782-4396-8e3a-37cdacdb7802" containerName="dnsmasq-dns"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.297617 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d71115c3-7782-4396-8e3a-37cdacdb7802" containerName="dnsmasq-dns"
Nov 30 07:05:50 crc kubenswrapper[4941]: E1130 07:05:50.297649 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d71115c3-7782-4396-8e3a-37cdacdb7802" containerName="init"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.297657 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d71115c3-7782-4396-8e3a-37cdacdb7802" containerName="init"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.297847 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d71115c3-7782-4396-8e3a-37cdacdb7802" containerName="dnsmasq-dns"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.298844 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.304771 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6794dcdcd8-t9v24"]
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.326091 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.326353 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.430203 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-internal-tls-certs\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.432923 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-combined-ca-bundle\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.433008 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-public-tls-certs\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.433445 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wltsh\" (UniqueName: \"kubernetes.io/projected/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-kube-api-access-wltsh\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.433525 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-logs\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.433570 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-config-data\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.433764 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-config-data-custom\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.492812 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dfd64c45-d492g" event={"ID":"524a3388-a23b-4d06-a79c-56c5dfb3107d","Type":"ContainerStarted","Data":"453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414"}
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.494281 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.543991 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-config-data-custom\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.544299 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-internal-tls-certs\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.544346 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-combined-ca-bundle\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.544466 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-public-tls-certs\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.544553 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wltsh\" (UniqueName: \"kubernetes.io/projected/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-kube-api-access-wltsh\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.544604 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-logs\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.544695 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-config-data\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.545543 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-logs\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.556660 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-combined-ca-bundle\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.557207 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-64dfd64c45-d492g" podStartSLOduration=4.55717552 podStartE2EDuration="4.55717552s" podCreationTimestamp="2025-11-30 07:05:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:50.518248517 +0000 UTC m=+1171.286420126" watchObservedRunningTime="2025-11-30 07:05:50.55717552 +0000 UTC m=+1171.325347129"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.558118 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-public-tls-certs\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.558394 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-config-data-custom\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.570954 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-config-data\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.571837 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wltsh\" (UniqueName: \"kubernetes.io/projected/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-kube-api-access-wltsh\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.586783 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-internal-tls-certs\") pod \"barbican-api-6794dcdcd8-t9v24\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:50 crc kubenswrapper[4941]: I1130 07:05:50.635136 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:51 crc kubenswrapper[4941]: I1130 07:05:51.641945 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6794dcdcd8-t9v24"]
Nov 30 07:05:52 crc kubenswrapper[4941]: I1130 07:05:52.515090 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" event={"ID":"126a91a7-8a81-40ef-87db-383ed37a26f4","Type":"ContainerStarted","Data":"83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af"}
Nov 30 07:05:52 crc kubenswrapper[4941]: I1130 07:05:52.515551 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" event={"ID":"126a91a7-8a81-40ef-87db-383ed37a26f4","Type":"ContainerStarted","Data":"ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686"}
Nov 30 07:05:52 crc kubenswrapper[4941]: I1130 07:05:52.525505 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6794dcdcd8-t9v24" event={"ID":"cf0e4aae-888b-4df8-a6e2-19a5f04b9656","Type":"ContainerStarted","Data":"26ada5d7acc3a461a8a330fae6ff00d6ad1801a8caa3a433600d9307e3c1a50d"}
Nov 30 07:05:52 crc kubenswrapper[4941]: I1130 07:05:52.525550 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6794dcdcd8-t9v24" event={"ID":"cf0e4aae-888b-4df8-a6e2-19a5f04b9656","Type":"ContainerStarted","Data":"aeb000c0386dce47c7a56b3872cfb39b10d750b9a481ebdd2c575d97c0bbecbd"}
Nov 30 07:05:52 crc kubenswrapper[4941]: I1130 07:05:52.525560 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6794dcdcd8-t9v24" event={"ID":"cf0e4aae-888b-4df8-a6e2-19a5f04b9656","Type":"ContainerStarted","Data":"900e56bf351145d2e33c5f723dada72f701a715ec5a9918b6ee37c0e3f41c9a5"}
Nov 30 07:05:52 crc kubenswrapper[4941]: I1130 07:05:52.525596 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:52 crc kubenswrapper[4941]: I1130 07:05:52.525626 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6794dcdcd8-t9v24"
Nov 30 07:05:52 crc kubenswrapper[4941]: I1130 07:05:52.531397 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-74d6754465-nglc5" event={"ID":"fb74a593-764a-416b-897b-539bafb29c70","Type":"ContainerStarted","Data":"012634e55142fed4221ecd68adf1f8141133456f2d0eb8c2e3aca13cafe2681c"}
Nov 30 07:05:52 crc kubenswrapper[4941]: I1130 07:05:52.531444 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-74d6754465-nglc5" event={"ID":"fb74a593-764a-416b-897b-539bafb29c70","Type":"ContainerStarted","Data":"73cb23f3d6e038846d4f5b8f18403308641c3a8d002d0c1fd63ffe41c380b3ef"}
Nov 30 07:05:52 crc kubenswrapper[4941]: I1130 07:05:52.532139 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" podStartSLOduration=3.546055467 podStartE2EDuration="6.532117761s" podCreationTimestamp="2025-11-30 07:05:46 +0000 UTC" firstStartedPulling="2025-11-30 07:05:48.145546049 +0000 UTC m=+1168.913717658" lastFinishedPulling="2025-11-30 07:05:51.131608343 +0000 UTC m=+1171.899779952" observedRunningTime="2025-11-30 07:05:52.528984474 +0000 UTC m=+1173.297156083" watchObservedRunningTime="2025-11-30 07:05:52.532117761 +0000 UTC m=+1173.300289360"
Nov 30 07:05:52 crc kubenswrapper[4941]: I1130 07:05:52.577841 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6794dcdcd8-t9v24" podStartSLOduration=2.577822705 podStartE2EDuration="2.577822705s" podCreationTimestamp="2025-11-30 07:05:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:05:52.561408174 +0000 UTC m=+1173.329579783" watchObservedRunningTime="2025-11-30 07:05:52.577822705 +0000 UTC m=+1173.345994304"
Nov 30 07:05:52 crc kubenswrapper[4941]: I1130 07:05:52.622785 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-74d6754465-nglc5" podStartSLOduration=3.598255414 podStartE2EDuration="6.622763426s" podCreationTimestamp="2025-11-30 07:05:46 +0000 UTC" firstStartedPulling="2025-11-30 07:05:48.109908129 +0000 UTC m=+1168.878079738" lastFinishedPulling="2025-11-30 07:05:51.134416141 +0000 UTC m=+1171.902587750" observedRunningTime="2025-11-30 07:05:52.593770862 +0000 UTC m=+1173.361942471" watchObservedRunningTime="2025-11-30 07:05:52.622763426 +0000 UTC m=+1173.390935035"
Nov 30 07:05:54 crc kubenswrapper[4941]: I1130 07:05:54.558684 4941 generic.go:334] "Generic (PLEG): container finished" podID="2ee0724a-81f2-4b40-959f-e831f4d9abf5" containerID="9fbbb85ad30a83b7456fb44a31c2209264c5e153a674196f4ae8765e3a9deed8" exitCode=0
Nov 30 07:05:54 crc kubenswrapper[4941]: I1130 07:05:54.558890 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5zxfl" event={"ID":"2ee0724a-81f2-4b40-959f-e831f4d9abf5","Type":"ContainerDied","Data":"9fbbb85ad30a83b7456fb44a31c2209264c5e153a674196f4ae8765e3a9deed8"}
Nov 30 07:05:57 crc kubenswrapper[4941]: I1130 07:05:57.373582 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-64dfd64c45-d492g"
Nov 30 07:05:57 crc kubenswrapper[4941]: I1130 07:05:57.468963 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f677dd449-r6lks"]
Nov 30 07:05:57 crc kubenswrapper[4941]: I1130 07:05:57.469234 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" podUID="356ab68a-ffce-4827-b044-0bce069c508a" containerName="dnsmasq-dns" containerID="cri-o://dbb988c2b953070e9157a1f0b178a07271722205af19fcaf631da4c93d90b298" gracePeriod=10
Nov 30 07:05:57 crc kubenswrapper[4941]: I1130 07:05:57.636236 4941 generic.go:334] "Generic (PLEG): container finished" podID="356ab68a-ffce-4827-b044-0bce069c508a" containerID="dbb988c2b953070e9157a1f0b178a07271722205af19fcaf631da4c93d90b298" exitCode=0
Nov 30 07:05:57 crc kubenswrapper[4941]: I1130 07:05:57.636290 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" event={"ID":"356ab68a-ffce-4827-b044-0bce069c508a","Type":"ContainerDied","Data":"dbb988c2b953070e9157a1f0b178a07271722205af19fcaf631da4c93d90b298"}
Nov 30 07:05:57 crc kubenswrapper[4941]: E1130 07:05:57.767859 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod356ab68a_ffce_4827_b044_0bce069c508a.slice/crio-conmon-dbb988c2b953070e9157a1f0b178a07271722205af19fcaf631da4c93d90b298.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod356ab68a_ffce_4827_b044_0bce069c508a.slice/crio-dbb988c2b953070e9157a1f0b178a07271722205af19fcaf631da4c93d90b298.scope\": RecentStats: unable to find data in memory cache]"
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.328036 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5zxfl"
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.432271 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-config-data\") pod \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") "
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.432396 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-combined-ca-bundle\") pod \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") "
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.432430 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-scripts\") pod \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") "
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.432539 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4thd\" (UniqueName: \"kubernetes.io/projected/2ee0724a-81f2-4b40-959f-e831f4d9abf5-kube-api-access-s4thd\") pod \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") "
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.432583 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-db-sync-config-data\") pod \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") "
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.432616 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ee0724a-81f2-4b40-959f-e831f4d9abf5-etc-machine-id\") pod \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\" (UID: \"2ee0724a-81f2-4b40-959f-e831f4d9abf5\") "
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.433060 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2ee0724a-81f2-4b40-959f-e831f4d9abf5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2ee0724a-81f2-4b40-959f-e831f4d9abf5" (UID: "2ee0724a-81f2-4b40-959f-e831f4d9abf5"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.441852 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-scripts" (OuterVolumeSpecName: "scripts") pod "2ee0724a-81f2-4b40-959f-e831f4d9abf5" (UID: "2ee0724a-81f2-4b40-959f-e831f4d9abf5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.446870 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2ee0724a-81f2-4b40-959f-e831f4d9abf5" (UID: "2ee0724a-81f2-4b40-959f-e831f4d9abf5"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.447081 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ee0724a-81f2-4b40-959f-e831f4d9abf5-kube-api-access-s4thd" (OuterVolumeSpecName: "kube-api-access-s4thd") pod "2ee0724a-81f2-4b40-959f-e831f4d9abf5" (UID: "2ee0724a-81f2-4b40-959f-e831f4d9abf5"). InnerVolumeSpecName "kube-api-access-s4thd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.481417 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ee0724a-81f2-4b40-959f-e831f4d9abf5" (UID: "2ee0724a-81f2-4b40-959f-e831f4d9abf5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.493207 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-config-data" (OuterVolumeSpecName: "config-data") pod "2ee0724a-81f2-4b40-959f-e831f4d9abf5" (UID: "2ee0724a-81f2-4b40-959f-e831f4d9abf5"). InnerVolumeSpecName "config-data".
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.534846 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.534874 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.534885 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.534893 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4thd\" (UniqueName: \"kubernetes.io/projected/2ee0724a-81f2-4b40-959f-e831f4d9abf5-kube-api-access-s4thd\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.534904 4941 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2ee0724a-81f2-4b40-959f-e831f4d9abf5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.534912 4941 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ee0724a-81f2-4b40-959f-e831f4d9abf5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.645780 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-5zxfl" event={"ID":"2ee0724a-81f2-4b40-959f-e831f4d9abf5","Type":"ContainerDied","Data":"90251ff78757c44348454b6c0bbdd3bb672228f7c90a57af88f41ff2dbe41077"} Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.646046 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90251ff78757c44348454b6c0bbdd3bb672228f7c90a57af88f41ff2dbe41077" Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.646182 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-5zxfl" Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.834844 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.944078 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-dns-svc\") pod \"356ab68a-ffce-4827-b044-0bce069c508a\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.947901 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-dns-swift-storage-0\") pod \"356ab68a-ffce-4827-b044-0bce069c508a\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.948068 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-config\") pod \"356ab68a-ffce-4827-b044-0bce069c508a\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.948168 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7r2m\" (UniqueName: \"kubernetes.io/projected/356ab68a-ffce-4827-b044-0bce069c508a-kube-api-access-w7r2m\") pod \"356ab68a-ffce-4827-b044-0bce069c508a\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.948247 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-ovsdbserver-sb\") pod \"356ab68a-ffce-4827-b044-0bce069c508a\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.948421 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-ovsdbserver-nb\") pod \"356ab68a-ffce-4827-b044-0bce069c508a\" (UID: \"356ab68a-ffce-4827-b044-0bce069c508a\") " Nov 30 07:05:58 crc kubenswrapper[4941]: I1130 07:05:58.979566 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/356ab68a-ffce-4827-b044-0bce069c508a-kube-api-access-w7r2m" (OuterVolumeSpecName: "kube-api-access-w7r2m") pod "356ab68a-ffce-4827-b044-0bce069c508a" (UID: "356ab68a-ffce-4827-b044-0bce069c508a"). InnerVolumeSpecName "kube-api-access-w7r2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.017246 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "356ab68a-ffce-4827-b044-0bce069c508a" (UID: "356ab68a-ffce-4827-b044-0bce069c508a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.020931 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "356ab68a-ffce-4827-b044-0bce069c508a" (UID: "356ab68a-ffce-4827-b044-0bce069c508a"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.028362 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-config" (OuterVolumeSpecName: "config") pod "356ab68a-ffce-4827-b044-0bce069c508a" (UID: "356ab68a-ffce-4827-b044-0bce069c508a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.041585 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "356ab68a-ffce-4827-b044-0bce069c508a" (UID: "356ab68a-ffce-4827-b044-0bce069c508a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.042370 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "356ab68a-ffce-4827-b044-0bce069c508a" (UID: "356ab68a-ffce-4827-b044-0bce069c508a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.051344 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.051537 4941 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.051617 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.051688 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7r2m\" (UniqueName: \"kubernetes.io/projected/356ab68a-ffce-4827-b044-0bce069c508a-kube-api-access-w7r2m\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.051764 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.051832 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/356ab68a-ffce-4827-b044-0bce069c508a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 07:05:59 crc kubenswrapper[4941]: E1130 07:05:59.129161 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.164942 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:59 crc kubenswrapper[4941]: 
I1130 07:05:59.309887 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.579854 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 07:05:59 crc kubenswrapper[4941]: E1130 07:05:59.580240 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="356ab68a-ffce-4827-b044-0bce069c508a" containerName="dnsmasq-dns" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.580254 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="356ab68a-ffce-4827-b044-0bce069c508a" containerName="dnsmasq-dns" Nov 30 07:05:59 crc kubenswrapper[4941]: E1130 07:05:59.580273 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="356ab68a-ffce-4827-b044-0bce069c508a" containerName="init" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.580279 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="356ab68a-ffce-4827-b044-0bce069c508a" containerName="init" Nov 30 07:05:59 crc kubenswrapper[4941]: E1130 07:05:59.580294 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ee0724a-81f2-4b40-959f-e831f4d9abf5" containerName="cinder-db-sync" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.580300 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ee0724a-81f2-4b40-959f-e831f4d9abf5" containerName="cinder-db-sync" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.580475 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="356ab68a-ffce-4827-b044-0bce069c508a" containerName="dnsmasq-dns" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.580488 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ee0724a-81f2-4b40-959f-e831f4d9abf5" containerName="cinder-db-sync" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.587318 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.591278 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.591528 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.591662 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-9mc46" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.591820 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.604935 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.665961 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-scripts\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.666317 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.666592 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2h7w\" (UniqueName: \"kubernetes.io/projected/949d0877-d0ab-4330-8952-dd332aec2f16-kube-api-access-k2h7w\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.666826 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-config-data\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.667001 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.667220 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/949d0877-d0ab-4330-8952-dd332aec2f16-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.672795 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20e82100-3d40-4b16-be58-15e74c1aee65","Type":"ContainerStarted","Data":"34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452"} Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 
07:05:59.673076 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" containerName="ceilometer-notification-agent" containerID="cri-o://896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea" gracePeriod=30 Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.673196 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.673603 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" containerName="proxy-httpd" containerID="cri-o://34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452" gracePeriod=30 Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.673654 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" containerName="sg-core" containerID="cri-o://e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7" gracePeriod=30 Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.686102 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.686640 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f677dd449-r6lks" event={"ID":"356ab68a-ffce-4827-b044-0bce069c508a","Type":"ContainerDied","Data":"b352468f9cddbbbf6f1657032e643be1452f15dfb1b21f9f0817f598eab9a62c"} Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.686715 4941 scope.go:117] "RemoveContainer" containerID="dbb988c2b953070e9157a1f0b178a07271722205af19fcaf631da4c93d90b298" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.740616 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8ccb5c7cf-zxqkn"] Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.762831 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.766848 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8ccb5c7cf-zxqkn"] Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.786412 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.786586 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2h7w\" (UniqueName: \"kubernetes.io/projected/949d0877-d0ab-4330-8952-dd332aec2f16-kube-api-access-k2h7w\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.786802 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-config-data\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.786854 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.786879 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/949d0877-d0ab-4330-8952-dd332aec2f16-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.786989 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-scripts\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.791115 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/949d0877-d0ab-4330-8952-dd332aec2f16-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.819113 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.827420 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f677dd449-r6lks"] Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.832251 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-scripts\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.843265 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.853767 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-config-data\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.863109 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2h7w\" (UniqueName: \"kubernetes.io/projected/949d0877-d0ab-4330-8952-dd332aec2f16-kube-api-access-k2h7w\") pod \"cinder-scheduler-0\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.868019 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f677dd449-r6lks"] Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.900105 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-dns-swift-storage-0\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.900156 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-config\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.900199 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ts5lj\" (UniqueName: \"kubernetes.io/projected/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-kube-api-access-ts5lj\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.900255 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-dns-svc\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.900305 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-ovsdbserver-nb\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.900414 4941 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-ovsdbserver-sb\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.923875 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.927760 4941 scope.go:117] "RemoveContainer" containerID="0be944b2611c455b256e4373bd28d21687a94e0fd2f3d455292e254a865c9a79" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.978155 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.981021 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.984540 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 30 07:05:59 crc kubenswrapper[4941]: I1130 07:05:59.993238 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.004139 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-dns-swift-storage-0\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.004181 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-config\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.004229 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ts5lj\" (UniqueName: \"kubernetes.io/projected/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-kube-api-access-ts5lj\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.004288 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-dns-svc\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.004339 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-ovsdbserver-nb\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.004431 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-ovsdbserver-sb\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: 
\"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.005219 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-ovsdbserver-sb\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.005756 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-ovsdbserver-nb\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.009596 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-config\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.011627 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-dns-svc\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.013191 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-dns-swift-storage-0\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.026680 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ts5lj\" (UniqueName: \"kubernetes.io/projected/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-kube-api-access-ts5lj\") pod \"dnsmasq-dns-8ccb5c7cf-zxqkn\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.106555 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.106759 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-config-data\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.106787 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b690508-1059-4ce8-9034-23014064d01c-logs\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.106917 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-scripts\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.107067 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1b690508-1059-4ce8-9034-23014064d01c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.107210 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-config-data-custom\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.107384 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjlnk\" (UniqueName: \"kubernetes.io/projected/1b690508-1059-4ce8-9034-23014064d01c-kube-api-access-cjlnk\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.215339 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-config-data\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.215800 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b690508-1059-4ce8-9034-23014064d01c-logs\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.215829 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-scripts\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.215881 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1b690508-1059-4ce8-9034-23014064d01c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.215903 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-config-data-custom\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.215938 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjlnk\" (UniqueName: \"kubernetes.io/projected/1b690508-1059-4ce8-9034-23014064d01c-kube-api-access-cjlnk\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc 
kubenswrapper[4941]: I1130 07:06:00.216043 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.216277 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b690508-1059-4ce8-9034-23014064d01c-logs\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.216365 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1b690508-1059-4ce8-9034-23014064d01c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.223211 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-scripts\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.224043 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-config-data\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.235197 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.235372 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-config-data-custom\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.235879 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjlnk\" (UniqueName: \"kubernetes.io/projected/1b690508-1059-4ce8-9034-23014064d01c-kube-api-access-cjlnk\") pod \"cinder-api-0\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.271161 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.318299 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.470493 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.703037 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"949d0877-d0ab-4330-8952-dd332aec2f16","Type":"ContainerStarted","Data":"0b6271e8e14281725af466c28bf64807da12763aef14fae0bde28bd47e17ab6a"} Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.717195 4941 generic.go:334] "Generic (PLEG): container finished" podID="20e82100-3d40-4b16-be58-15e74c1aee65" containerID="34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452" exitCode=0 Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.717233 4941 generic.go:334] "Generic (PLEG): container finished" podID="20e82100-3d40-4b16-be58-15e74c1aee65" containerID="e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7" exitCode=2 Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.717254 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20e82100-3d40-4b16-be58-15e74c1aee65","Type":"ContainerDied","Data":"34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452"} Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.717278 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20e82100-3d40-4b16-be58-15e74c1aee65","Type":"ContainerDied","Data":"e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7"} Nov 30 07:06:00 crc kubenswrapper[4941]: W1130 07:06:00.899505 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd468fd0f_10b7_46d8_a8f8_ce095f225ff4.slice/crio-f13e9c4f02c4f1cf39d89517a82023c510de18a1e28980af1059c239cd0a51d9 WatchSource:0}: Error finding container f13e9c4f02c4f1cf39d89517a82023c510de18a1e28980af1059c239cd0a51d9: Status 404 returned error can't find the container with id f13e9c4f02c4f1cf39d89517a82023c510de18a1e28980af1059c239cd0a51d9 Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.906744 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8ccb5c7cf-zxqkn"] Nov 30 07:06:00 crc kubenswrapper[4941]: I1130 07:06:00.996496 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 30 07:06:01 crc kubenswrapper[4941]: W1130 07:06:01.000010 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b690508_1059_4ce8_9034_23014064d01c.slice/crio-4a97fed393a777c3dfccd3e2a6c90ed3c0ed27658056ee4b8ede3b8328415be3 WatchSource:0}: Error finding container 4a97fed393a777c3dfccd3e2a6c90ed3c0ed27658056ee4b8ede3b8328415be3: Status 404 returned error can't find the container with id 4a97fed393a777c3dfccd3e2a6c90ed3c0ed27658056ee4b8ede3b8328415be3 Nov 30 07:06:01 crc kubenswrapper[4941]: I1130 07:06:01.549920 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="356ab68a-ffce-4827-b044-0bce069c508a" path="/var/lib/kubelet/pods/356ab68a-ffce-4827-b044-0bce069c508a/volumes" Nov 30 07:06:01 crc kubenswrapper[4941]: I1130 07:06:01.732922 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"1b690508-1059-4ce8-9034-23014064d01c","Type":"ContainerStarted","Data":"4a97fed393a777c3dfccd3e2a6c90ed3c0ed27658056ee4b8ede3b8328415be3"} Nov 30 07:06:01 crc kubenswrapper[4941]: I1130 07:06:01.737127 4941 generic.go:334] "Generic (PLEG): container finished" podID="d468fd0f-10b7-46d8-a8f8-ce095f225ff4" containerID="a6bd0dc4af0fd797134cd9cdf820d248e4d3f79d4b67f746cf051f0bcda68bb0" exitCode=0 Nov 30 07:06:01 crc kubenswrapper[4941]: I1130 07:06:01.737168 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" event={"ID":"d468fd0f-10b7-46d8-a8f8-ce095f225ff4","Type":"ContainerDied","Data":"a6bd0dc4af0fd797134cd9cdf820d248e4d3f79d4b67f746cf051f0bcda68bb0"} Nov 30 07:06:01 crc kubenswrapper[4941]: I1130 07:06:01.737309 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" event={"ID":"d468fd0f-10b7-46d8-a8f8-ce095f225ff4","Type":"ContainerStarted","Data":"f13e9c4f02c4f1cf39d89517a82023c510de18a1e28980af1059c239cd0a51d9"} Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.167399 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.517175 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6794dcdcd8-t9v24" Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.628868 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6794dcdcd8-t9v24" Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.691635 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5f6b49b776-wzn74"] Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.692221 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5f6b49b776-wzn74" podUID="84e0357d-8ac0-467b-883e-04e54d50de54" containerName="barbican-api-log" containerID="cri-o://10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382" gracePeriod=30 Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.692675 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5f6b49b776-wzn74" podUID="84e0357d-8ac0-467b-883e-04e54d50de54" containerName="barbican-api" containerID="cri-o://79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8" gracePeriod=30 Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.698433 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5f6b49b776-wzn74" podUID="84e0357d-8ac0-467b-883e-04e54d50de54" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": EOF" Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.762365 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1b690508-1059-4ce8-9034-23014064d01c","Type":"ContainerStarted","Data":"3e42afe81d1ede373645db3194928f044f069a410a6cdb30fa97f84d0647fdda"} Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.762406 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1b690508-1059-4ce8-9034-23014064d01c","Type":"ContainerStarted","Data":"8d2ba2c844810ea8a564d33984c34b62520beb9a2b84a184a69226b07035b4eb"} Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.762525 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" 
podUID="1b690508-1059-4ce8-9034-23014064d01c" containerName="cinder-api-log" containerID="cri-o://8d2ba2c844810ea8a564d33984c34b62520beb9a2b84a184a69226b07035b4eb" gracePeriod=30 Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.762820 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.763092 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1b690508-1059-4ce8-9034-23014064d01c" containerName="cinder-api" containerID="cri-o://3e42afe81d1ede373645db3194928f044f069a410a6cdb30fa97f84d0647fdda" gracePeriod=30 Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.765629 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"949d0877-d0ab-4330-8952-dd332aec2f16","Type":"ContainerStarted","Data":"49e359a68c19b23d439dd0380ce6141b3fe695216416dfda1e629dce9b9c76b5"} Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.787448 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" event={"ID":"d468fd0f-10b7-46d8-a8f8-ce095f225ff4","Type":"ContainerStarted","Data":"62790034a32cbbc4e258498878ac4ba8007d6cbc4ad8bd0e46ee6b59e8cbd336"} Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.787541 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.814005 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" podStartSLOduration=3.813986185 podStartE2EDuration="3.813986185s" podCreationTimestamp="2025-11-30 07:05:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:06:02.812569011 +0000 UTC m=+1183.580740620" watchObservedRunningTime="2025-11-30 07:06:02.813986185 +0000 UTC m=+1183.582157794" Nov 30 07:06:02 crc kubenswrapper[4941]: I1130 07:06:02.815506 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.815498532 podStartE2EDuration="3.815498532s" podCreationTimestamp="2025-11-30 07:05:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:06:02.787238251 +0000 UTC m=+1183.555409870" watchObservedRunningTime="2025-11-30 07:06:02.815498532 +0000 UTC m=+1183.583670141" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.821063 4941 generic.go:334] "Generic (PLEG): container finished" podID="1b690508-1059-4ce8-9034-23014064d01c" containerID="8d2ba2c844810ea8a564d33984c34b62520beb9a2b84a184a69226b07035b4eb" exitCode=143 Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.821533 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1b690508-1059-4ce8-9034-23014064d01c","Type":"ContainerDied","Data":"8d2ba2c844810ea8a564d33984c34b62520beb9a2b84a184a69226b07035b4eb"} Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.824343 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.826850 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"949d0877-d0ab-4330-8952-dd332aec2f16","Type":"ContainerStarted","Data":"7a2bc676d642fb0f884dd3d70f46790a87d8d01db1aa0b5d469551188b5dfef1"} Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.833803 4941 generic.go:334] "Generic (PLEG): container finished" podID="20e82100-3d40-4b16-be58-15e74c1aee65" containerID="896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea" exitCode=0 Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.833983 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20e82100-3d40-4b16-be58-15e74c1aee65","Type":"ContainerDied","Data":"896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea"} Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.834008 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20e82100-3d40-4b16-be58-15e74c1aee65","Type":"ContainerDied","Data":"1a7fb7fd578b4aae1d498f809f5473128080ec5630dc7c49196b8290ab5d178d"} Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.834025 4941 scope.go:117] "RemoveContainer" containerID="34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.834242 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.837716 4941 generic.go:334] "Generic (PLEG): container finished" podID="84e0357d-8ac0-467b-883e-04e54d50de54" containerID="10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382" exitCode=143 Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.837821 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6b49b776-wzn74" event={"ID":"84e0357d-8ac0-467b-883e-04e54d50de54","Type":"ContainerDied","Data":"10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382"} Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.866120 4941 scope.go:117] "RemoveContainer" containerID="e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.875158 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.071476569 podStartE2EDuration="4.875120646s" podCreationTimestamp="2025-11-30 07:05:59 +0000 UTC" firstStartedPulling="2025-11-30 07:06:00.514037724 +0000 UTC m=+1181.282209343" lastFinishedPulling="2025-11-30 07:06:01.317681811 +0000 UTC m=+1182.085853420" observedRunningTime="2025-11-30 07:06:03.869866933 +0000 UTC m=+1184.638038562" watchObservedRunningTime="2025-11-30 07:06:03.875120646 +0000 UTC m=+1184.643292255" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.899105 4941 scope.go:117] "RemoveContainer" containerID="896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.920857 4941 scope.go:117] "RemoveContainer" containerID="34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.921461 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dw8zc\" (UniqueName: 
\"kubernetes.io/projected/20e82100-3d40-4b16-be58-15e74c1aee65-kube-api-access-dw8zc\") pod \"20e82100-3d40-4b16-be58-15e74c1aee65\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.921529 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-combined-ca-bundle\") pod \"20e82100-3d40-4b16-be58-15e74c1aee65\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.921600 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20e82100-3d40-4b16-be58-15e74c1aee65-run-httpd\") pod \"20e82100-3d40-4b16-be58-15e74c1aee65\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.921630 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-scripts\") pod \"20e82100-3d40-4b16-be58-15e74c1aee65\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.921712 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20e82100-3d40-4b16-be58-15e74c1aee65-log-httpd\") pod \"20e82100-3d40-4b16-be58-15e74c1aee65\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.921784 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-sg-core-conf-yaml\") pod \"20e82100-3d40-4b16-be58-15e74c1aee65\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.921876 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-config-data\") pod \"20e82100-3d40-4b16-be58-15e74c1aee65\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.922792 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20e82100-3d40-4b16-be58-15e74c1aee65-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "20e82100-3d40-4b16-be58-15e74c1aee65" (UID: "20e82100-3d40-4b16-be58-15e74c1aee65"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.924074 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20e82100-3d40-4b16-be58-15e74c1aee65-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "20e82100-3d40-4b16-be58-15e74c1aee65" (UID: "20e82100-3d40-4b16-be58-15e74c1aee65"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:06:03 crc kubenswrapper[4941]: E1130 07:06:03.921797 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452\": container with ID starting with 34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452 not found: ID does not exist" containerID="34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.924250 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452"} err="failed to get container status \"34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452\": rpc error: code = NotFound desc = could not find container \"34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452\": container with ID starting with 34dbcbebb1050025a3aa368c57e6746a821d8fc05eaf200509e8dbd5ea134452 not found: ID does not exist" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.924281 4941 scope.go:117] "RemoveContainer" containerID="e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.930171 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-scripts" (OuterVolumeSpecName: "scripts") pod "20e82100-3d40-4b16-be58-15e74c1aee65" (UID: "20e82100-3d40-4b16-be58-15e74c1aee65"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.931494 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20e82100-3d40-4b16-be58-15e74c1aee65-kube-api-access-dw8zc" (OuterVolumeSpecName: "kube-api-access-dw8zc") pod "20e82100-3d40-4b16-be58-15e74c1aee65" (UID: "20e82100-3d40-4b16-be58-15e74c1aee65"). InnerVolumeSpecName "kube-api-access-dw8zc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:03 crc kubenswrapper[4941]: E1130 07:06:03.931645 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7\": container with ID starting with e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7 not found: ID does not exist" containerID="e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.931685 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7"} err="failed to get container status \"e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7\": rpc error: code = NotFound desc = could not find container \"e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7\": container with ID starting with e6c6fb5f0c3d9f3c8ed40e9215524cce422aa79e063ffe966f7ea3234e0c6cb7 not found: ID does not exist" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.931714 4941 scope.go:117] "RemoveContainer" containerID="896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea" Nov 30 07:06:03 crc kubenswrapper[4941]: E1130 07:06:03.934501 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea\": container with ID starting with 896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea not found: ID does not exist" containerID="896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.934550 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea"} err="failed to get container status \"896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea\": rpc error: code = NotFound desc = could not find container \"896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea\": container with ID starting with 896cbda41f0a94493ba73580e5d5919a23a9b26e2c08ac0656186814103207ea not found: ID does not exist" Nov 30 07:06:03 crc kubenswrapper[4941]: I1130 07:06:03.954134 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "20e82100-3d40-4b16-be58-15e74c1aee65" (UID: "20e82100-3d40-4b16-be58-15e74c1aee65"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.023542 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-config-data" (OuterVolumeSpecName: "config-data") pod "20e82100-3d40-4b16-be58-15e74c1aee65" (UID: "20e82100-3d40-4b16-be58-15e74c1aee65"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.023645 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-config-data\") pod \"20e82100-3d40-4b16-be58-15e74c1aee65\" (UID: \"20e82100-3d40-4b16-be58-15e74c1aee65\") " Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.024208 4941 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20e82100-3d40-4b16-be58-15e74c1aee65-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.024219 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.024229 4941 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20e82100-3d40-4b16-be58-15e74c1aee65-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.024238 4941 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.024247 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dw8zc\" (UniqueName: \"kubernetes.io/projected/20e82100-3d40-4b16-be58-15e74c1aee65-kube-api-access-dw8zc\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:04 crc kubenswrapper[4941]: W1130 07:06:04.024343 4941 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/20e82100-3d40-4b16-be58-15e74c1aee65/volumes/kubernetes.io~secret/config-data Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.024372 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-config-data" (OuterVolumeSpecName: "config-data") pod "20e82100-3d40-4b16-be58-15e74c1aee65" (UID: "20e82100-3d40-4b16-be58-15e74c1aee65"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.031461 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20e82100-3d40-4b16-be58-15e74c1aee65" (UID: "20e82100-3d40-4b16-be58-15e74c1aee65"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.126412 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.126461 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20e82100-3d40-4b16-be58-15e74c1aee65-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.223374 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.239462 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.273990 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:04 crc kubenswrapper[4941]: E1130 07:06:04.274418 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" containerName="sg-core" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.274436 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" containerName="sg-core" Nov 30 07:06:04 crc kubenswrapper[4941]: E1130 07:06:04.274456 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" containerName="proxy-httpd" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.274464 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" containerName="proxy-httpd" Nov 30 07:06:04 crc kubenswrapper[4941]: E1130 07:06:04.274494 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" containerName="ceilometer-notification-agent" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.274501 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" containerName="ceilometer-notification-agent" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.274670 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" containerName="proxy-httpd" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.274687 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" containerName="sg-core" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.274710 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" containerName="ceilometer-notification-agent" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.284163 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.286918 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.288932 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.330136 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5p87\" (UniqueName: \"kubernetes.io/projected/ab604dfb-8fe4-4e98-8652-a98369b5d260-kube-api-access-j5p87\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.330266 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.330304 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab604dfb-8fe4-4e98-8652-a98369b5d260-log-httpd\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.330415 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab604dfb-8fe4-4e98-8652-a98369b5d260-run-httpd\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.330453 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.330484 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-scripts\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.330506 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-config-data\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.340273 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.433340 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab604dfb-8fe4-4e98-8652-a98369b5d260-run-httpd\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.433896 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.433971 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-scripts\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.434026 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-config-data\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.434101 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab604dfb-8fe4-4e98-8652-a98369b5d260-run-httpd\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.434108 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5p87\" (UniqueName: \"kubernetes.io/projected/ab604dfb-8fe4-4e98-8652-a98369b5d260-kube-api-access-j5p87\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.434392 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.434497 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab604dfb-8fe4-4e98-8652-a98369b5d260-log-httpd\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.434844 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab604dfb-8fe4-4e98-8652-a98369b5d260-log-httpd\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.464399 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.469895 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-config-data\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.472148 4941 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.497112 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5p87\" (UniqueName: \"kubernetes.io/projected/ab604dfb-8fe4-4e98-8652-a98369b5d260-kube-api-access-j5p87\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.498888 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-scripts\") pod \"ceilometer-0\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.628995 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:06:04 crc kubenswrapper[4941]: I1130 07:06:04.929520 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 30 07:06:05 crc kubenswrapper[4941]: I1130 07:06:05.117505 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:05 crc kubenswrapper[4941]: I1130 07:06:05.533849 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20e82100-3d40-4b16-be58-15e74c1aee65" path="/var/lib/kubelet/pods/20e82100-3d40-4b16-be58-15e74c1aee65/volumes" Nov 30 07:06:05 crc kubenswrapper[4941]: I1130 07:06:05.735708 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:06:05 crc kubenswrapper[4941]: I1130 07:06:05.878608 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab604dfb-8fe4-4e98-8652-a98369b5d260","Type":"ContainerStarted","Data":"242f9300dcca13e597db3d366127dfb67398d0ffaacbeb5fb4bf687800251033"} Nov 30 07:06:06 crc kubenswrapper[4941]: I1130 07:06:06.910619 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab604dfb-8fe4-4e98-8652-a98369b5d260","Type":"ContainerStarted","Data":"2b35414a0896dbaa38ec03ad6db919f7a32c9b9d5f88db656e884592de8bd62d"} Nov 30 07:06:06 crc kubenswrapper[4941]: I1130 07:06:06.911673 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab604dfb-8fe4-4e98-8652-a98369b5d260","Type":"ContainerStarted","Data":"d2081b880a27904af1223737519422538d6db133f4caa53fe25b036dc35f6cb4"} Nov 30 07:06:07 crc kubenswrapper[4941]: I1130 07:06:07.926730 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab604dfb-8fe4-4e98-8652-a98369b5d260","Type":"ContainerStarted","Data":"d665c69b49cc8c4f3a5b3024b78164a30243d365fd7ef133b4450db4a1744d30"} Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.119790 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5f6b49b776-wzn74" podUID="84e0357d-8ac0-467b-883e-04e54d50de54" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": read tcp 10.217.0.2:47102->10.217.0.154:9311: read: connection reset by peer" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.119864 4941 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openstack/barbican-api-5f6b49b776-wzn74" podUID="84e0357d-8ac0-467b-883e-04e54d50de54" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": read tcp 10.217.0.2:47108->10.217.0.154:9311: read: connection reset by peer" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.646560 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.750314 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84e0357d-8ac0-467b-883e-04e54d50de54-logs\") pod \"84e0357d-8ac0-467b-883e-04e54d50de54\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.750410 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-config-data-custom\") pod \"84e0357d-8ac0-467b-883e-04e54d50de54\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.751060 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-combined-ca-bundle\") pod \"84e0357d-8ac0-467b-883e-04e54d50de54\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.751162 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxr7h\" (UniqueName: \"kubernetes.io/projected/84e0357d-8ac0-467b-883e-04e54d50de54-kube-api-access-dxr7h\") pod \"84e0357d-8ac0-467b-883e-04e54d50de54\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.751568 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-config-data\") pod \"84e0357d-8ac0-467b-883e-04e54d50de54\" (UID: \"84e0357d-8ac0-467b-883e-04e54d50de54\") " Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.753893 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "84e0357d-8ac0-467b-883e-04e54d50de54" (UID: "84e0357d-8ac0-467b-883e-04e54d50de54"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.754900 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84e0357d-8ac0-467b-883e-04e54d50de54-logs" (OuterVolumeSpecName: "logs") pod "84e0357d-8ac0-467b-883e-04e54d50de54" (UID: "84e0357d-8ac0-467b-883e-04e54d50de54"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.773095 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84e0357d-8ac0-467b-883e-04e54d50de54-kube-api-access-dxr7h" (OuterVolumeSpecName: "kube-api-access-dxr7h") pod "84e0357d-8ac0-467b-883e-04e54d50de54" (UID: "84e0357d-8ac0-467b-883e-04e54d50de54"). InnerVolumeSpecName "kube-api-access-dxr7h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.784126 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "84e0357d-8ac0-467b-883e-04e54d50de54" (UID: "84e0357d-8ac0-467b-883e-04e54d50de54"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.802106 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.817421 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-config-data" (OuterVolumeSpecName: "config-data") pod "84e0357d-8ac0-467b-883e-04e54d50de54" (UID: "84e0357d-8ac0-467b-883e-04e54d50de54"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.860932 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.861457 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84e0357d-8ac0-467b-883e-04e54d50de54-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.861556 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.861635 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84e0357d-8ac0-467b-883e-04e54d50de54-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.861716 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxr7h\" (UniqueName: \"kubernetes.io/projected/84e0357d-8ac0-467b-883e-04e54d50de54-kube-api-access-dxr7h\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.884111 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-d79c95d68-mszfh"] Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.884405 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-d79c95d68-mszfh" podUID="14e3ee9e-71a8-4ebe-843a-a9b875995aea" containerName="neutron-api" containerID="cri-o://0161e266452b785a55d6813c53f14f9ef6eec721c43285425bffea10ab4abfaa" gracePeriod=30 Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.884608 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-d79c95d68-mszfh" podUID="14e3ee9e-71a8-4ebe-843a-a9b875995aea" containerName="neutron-httpd" containerID="cri-o://d3c6082ab51d39ca0408b622f5558557161f66113397c83e314cb554460603eb" gracePeriod=30 Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.941292 4941 generic.go:334] "Generic (PLEG): container finished" podID="84e0357d-8ac0-467b-883e-04e54d50de54" 
containerID="79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8" exitCode=0 Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.941349 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6b49b776-wzn74" event={"ID":"84e0357d-8ac0-467b-883e-04e54d50de54","Type":"ContainerDied","Data":"79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8"} Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.941376 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6b49b776-wzn74" event={"ID":"84e0357d-8ac0-467b-883e-04e54d50de54","Type":"ContainerDied","Data":"39d241e0029d6995a59bf609c05463595f0a9ecad00320860bf8ce32f895354d"} Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.941393 4941 scope.go:117] "RemoveContainer" containerID="79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.941417 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5f6b49b776-wzn74" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.971784 4941 scope.go:117] "RemoveContainer" containerID="10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382" Nov 30 07:06:08 crc kubenswrapper[4941]: I1130 07:06:08.989797 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5f6b49b776-wzn74"] Nov 30 07:06:09 crc kubenswrapper[4941]: I1130 07:06:09.007354 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5f6b49b776-wzn74"] Nov 30 07:06:09 crc kubenswrapper[4941]: I1130 07:06:09.030877 4941 scope.go:117] "RemoveContainer" containerID="79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8" Nov 30 07:06:09 crc kubenswrapper[4941]: E1130 07:06:09.036523 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8\": container with ID starting with 79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8 not found: ID does not exist" containerID="79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8" Nov 30 07:06:09 crc kubenswrapper[4941]: I1130 07:06:09.036588 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8"} err="failed to get container status \"79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8\": rpc error: code = NotFound desc = could not find container \"79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8\": container with ID starting with 79654e681cd66e871623ffa26ba416ebba07b2c5fb45ef8f7ffaa15bdbc59ba8 not found: ID does not exist" Nov 30 07:06:09 crc kubenswrapper[4941]: I1130 07:06:09.036622 4941 scope.go:117] "RemoveContainer" containerID="10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382" Nov 30 07:06:09 crc kubenswrapper[4941]: E1130 07:06:09.038108 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382\": container with ID starting with 10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382 not found: ID does not exist" containerID="10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382" Nov 30 07:06:09 crc kubenswrapper[4941]: I1130 07:06:09.038130 4941 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382"} err="failed to get container status \"10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382\": rpc error: code = NotFound desc = could not find container \"10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382\": container with ID starting with 10c8c126a36e012c79fc9fea74f38ba3efa69e128e3bba08fc27db69ad415382 not found: ID does not exist" Nov 30 07:06:09 crc kubenswrapper[4941]: I1130 07:06:09.566032 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84e0357d-8ac0-467b-883e-04e54d50de54" path="/var/lib/kubelet/pods/84e0357d-8ac0-467b-883e-04e54d50de54/volumes" Nov 30 07:06:09 crc kubenswrapper[4941]: I1130 07:06:09.952989 4941 generic.go:334] "Generic (PLEG): container finished" podID="14e3ee9e-71a8-4ebe-843a-a9b875995aea" containerID="d3c6082ab51d39ca0408b622f5558557161f66113397c83e314cb554460603eb" exitCode=0 Nov 30 07:06:09 crc kubenswrapper[4941]: I1130 07:06:09.953069 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d79c95d68-mszfh" event={"ID":"14e3ee9e-71a8-4ebe-843a-a9b875995aea","Type":"ContainerDied","Data":"d3c6082ab51d39ca0408b622f5558557161f66113397c83e314cb554460603eb"} Nov 30 07:06:09 crc kubenswrapper[4941]: I1130 07:06:09.956615 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab604dfb-8fe4-4e98-8652-a98369b5d260","Type":"ContainerStarted","Data":"c0e61a8d6e8681fca905ebcb523b95149808986e1480520d3172dd03109c9c5b"} Nov 30 07:06:09 crc kubenswrapper[4941]: I1130 07:06:09.957595 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 30 07:06:09 crc kubenswrapper[4941]: I1130 07:06:09.980024 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.396572409 podStartE2EDuration="5.980006571s" podCreationTimestamp="2025-11-30 07:06:04 +0000 UTC" firstStartedPulling="2025-11-30 07:06:05.123822663 +0000 UTC m=+1185.891994272" lastFinishedPulling="2025-11-30 07:06:08.707256825 +0000 UTC m=+1189.475428434" observedRunningTime="2025-11-30 07:06:09.979582008 +0000 UTC m=+1190.747753627" watchObservedRunningTime="2025-11-30 07:06:09.980006571 +0000 UTC m=+1190.748178180" Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.178440 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.249925 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.272452 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.341082 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64dfd64c45-d492g"] Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.341559 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-64dfd64c45-d492g" podUID="524a3388-a23b-4d06-a79c-56c5dfb3107d" containerName="dnsmasq-dns" containerID="cri-o://453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414" gracePeriod=10 Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.908971 4941 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-64dfd64c45-d492g" Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.968183 4941 generic.go:334] "Generic (PLEG): container finished" podID="524a3388-a23b-4d06-a79c-56c5dfb3107d" containerID="453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414" exitCode=0 Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.968231 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dfd64c45-d492g" event={"ID":"524a3388-a23b-4d06-a79c-56c5dfb3107d","Type":"ContainerDied","Data":"453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414"} Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.968301 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64dfd64c45-d492g" event={"ID":"524a3388-a23b-4d06-a79c-56c5dfb3107d","Type":"ContainerDied","Data":"7909fde426838449ed9fbc670a19d666bb48cf91056352c3481356266f23fdba"} Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.968308 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64dfd64c45-d492g" Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.968340 4941 scope.go:117] "RemoveContainer" containerID="453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414" Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.968403 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="949d0877-d0ab-4330-8952-dd332aec2f16" containerName="cinder-scheduler" containerID="cri-o://49e359a68c19b23d439dd0380ce6141b3fe695216416dfda1e629dce9b9c76b5" gracePeriod=30 Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.968588 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="949d0877-d0ab-4330-8952-dd332aec2f16" containerName="probe" containerID="cri-o://7a2bc676d642fb0f884dd3d70f46790a87d8d01db1aa0b5d469551188b5dfef1" gracePeriod=30 Nov 30 07:06:10 crc kubenswrapper[4941]: I1130 07:06:10.993430 4941 scope.go:117] "RemoveContainer" containerID="61ab52c2fc176084b5d837fa57c50b67b8e684f2764cf2ebffbaa4b2f2143bb2" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.004516 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-nb\") pod \"524a3388-a23b-4d06-a79c-56c5dfb3107d\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.004609 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-dns-svc\") pod \"524a3388-a23b-4d06-a79c-56c5dfb3107d\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.004673 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ztp9\" (UniqueName: \"kubernetes.io/projected/524a3388-a23b-4d06-a79c-56c5dfb3107d-kube-api-access-4ztp9\") pod \"524a3388-a23b-4d06-a79c-56c5dfb3107d\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.004824 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-sb\") pod 
\"524a3388-a23b-4d06-a79c-56c5dfb3107d\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.004853 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-dns-swift-storage-0\") pod \"524a3388-a23b-4d06-a79c-56c5dfb3107d\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.004871 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-config\") pod \"524a3388-a23b-4d06-a79c-56c5dfb3107d\" (UID: \"524a3388-a23b-4d06-a79c-56c5dfb3107d\") " Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.021826 4941 scope.go:117] "RemoveContainer" containerID="453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414" Nov 30 07:06:11 crc kubenswrapper[4941]: E1130 07:06:11.023796 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414\": container with ID starting with 453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414 not found: ID does not exist" containerID="453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.023859 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414"} err="failed to get container status \"453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414\": rpc error: code = NotFound desc = could not find container \"453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414\": container with ID starting with 453d5344c0ddf0e9f23b51443b4c8a514f8f13a07a5809dbde4485015b449414 not found: ID does not exist" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.023889 4941 scope.go:117] "RemoveContainer" containerID="61ab52c2fc176084b5d837fa57c50b67b8e684f2764cf2ebffbaa4b2f2143bb2" Nov 30 07:06:11 crc kubenswrapper[4941]: E1130 07:06:11.024294 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61ab52c2fc176084b5d837fa57c50b67b8e684f2764cf2ebffbaa4b2f2143bb2\": container with ID starting with 61ab52c2fc176084b5d837fa57c50b67b8e684f2764cf2ebffbaa4b2f2143bb2 not found: ID does not exist" containerID="61ab52c2fc176084b5d837fa57c50b67b8e684f2764cf2ebffbaa4b2f2143bb2" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.024315 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61ab52c2fc176084b5d837fa57c50b67b8e684f2764cf2ebffbaa4b2f2143bb2"} err="failed to get container status \"61ab52c2fc176084b5d837fa57c50b67b8e684f2764cf2ebffbaa4b2f2143bb2\": rpc error: code = NotFound desc = could not find container \"61ab52c2fc176084b5d837fa57c50b67b8e684f2764cf2ebffbaa4b2f2143bb2\": container with ID starting with 61ab52c2fc176084b5d837fa57c50b67b8e684f2764cf2ebffbaa4b2f2143bb2 not found: ID does not exist" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.026296 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/524a3388-a23b-4d06-a79c-56c5dfb3107d-kube-api-access-4ztp9" (OuterVolumeSpecName: "kube-api-access-4ztp9") pod 
"524a3388-a23b-4d06-a79c-56c5dfb3107d" (UID: "524a3388-a23b-4d06-a79c-56c5dfb3107d"). InnerVolumeSpecName "kube-api-access-4ztp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.069609 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-config" (OuterVolumeSpecName: "config") pod "524a3388-a23b-4d06-a79c-56c5dfb3107d" (UID: "524a3388-a23b-4d06-a79c-56c5dfb3107d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.078310 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "524a3388-a23b-4d06-a79c-56c5dfb3107d" (UID: "524a3388-a23b-4d06-a79c-56c5dfb3107d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.084970 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "524a3388-a23b-4d06-a79c-56c5dfb3107d" (UID: "524a3388-a23b-4d06-a79c-56c5dfb3107d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.088496 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "524a3388-a23b-4d06-a79c-56c5dfb3107d" (UID: "524a3388-a23b-4d06-a79c-56c5dfb3107d"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.107220 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.107265 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ztp9\" (UniqueName: \"kubernetes.io/projected/524a3388-a23b-4d06-a79c-56c5dfb3107d-kube-api-access-4ztp9\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.107280 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.107292 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.107302 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.114592 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "524a3388-a23b-4d06-a79c-56c5dfb3107d" (UID: "524a3388-a23b-4d06-a79c-56c5dfb3107d"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.216191 4941 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/524a3388-a23b-4d06-a79c-56c5dfb3107d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.304269 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64dfd64c45-d492g"] Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.311878 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-64dfd64c45-d492g"] Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.550421 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="524a3388-a23b-4d06-a79c-56c5dfb3107d" path="/var/lib/kubelet/pods/524a3388-a23b-4d06-a79c-56c5dfb3107d/volumes" Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.980206 4941 generic.go:334] "Generic (PLEG): container finished" podID="949d0877-d0ab-4330-8952-dd332aec2f16" containerID="7a2bc676d642fb0f884dd3d70f46790a87d8d01db1aa0b5d469551188b5dfef1" exitCode=0 Nov 30 07:06:11 crc kubenswrapper[4941]: I1130 07:06:11.980284 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"949d0877-d0ab-4330-8952-dd332aec2f16","Type":"ContainerDied","Data":"7a2bc676d642fb0f884dd3d70f46790a87d8d01db1aa0b5d469551188b5dfef1"} Nov 30 07:06:12 crc kubenswrapper[4941]: I1130 07:06:12.401646 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.012711 4941 generic.go:334] "Generic (PLEG): container finished" podID="949d0877-d0ab-4330-8952-dd332aec2f16" containerID="49e359a68c19b23d439dd0380ce6141b3fe695216416dfda1e629dce9b9c76b5" exitCode=0 Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.013560 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"949d0877-d0ab-4330-8952-dd332aec2f16","Type":"ContainerDied","Data":"49e359a68c19b23d439dd0380ce6141b3fe695216416dfda1e629dce9b9c76b5"} Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.016876 4941 generic.go:334] "Generic (PLEG): container finished" podID="14e3ee9e-71a8-4ebe-843a-a9b875995aea" containerID="0161e266452b785a55d6813c53f14f9ef6eec721c43285425bffea10ab4abfaa" exitCode=0 Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.016922 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d79c95d68-mszfh" event={"ID":"14e3ee9e-71a8-4ebe-843a-a9b875995aea","Type":"ContainerDied","Data":"0161e266452b785a55d6813c53f14f9ef6eec721c43285425bffea10ab4abfaa"} Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.096967 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.170559 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.211988 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xf82l\" (UniqueName: \"kubernetes.io/projected/14e3ee9e-71a8-4ebe-843a-a9b875995aea-kube-api-access-xf82l\") pod \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.212030 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-httpd-config\") pod \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.212073 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2h7w\" (UniqueName: \"kubernetes.io/projected/949d0877-d0ab-4330-8952-dd332aec2f16-kube-api-access-k2h7w\") pod \"949d0877-d0ab-4330-8952-dd332aec2f16\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.212115 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/949d0877-d0ab-4330-8952-dd332aec2f16-etc-machine-id\") pod \"949d0877-d0ab-4330-8952-dd332aec2f16\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.212181 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-config\") pod \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.212208 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-config-data-custom\") pod \"949d0877-d0ab-4330-8952-dd332aec2f16\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.212259 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-combined-ca-bundle\") pod \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.212256 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/949d0877-d0ab-4330-8952-dd332aec2f16-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "949d0877-d0ab-4330-8952-dd332aec2f16" (UID: "949d0877-d0ab-4330-8952-dd332aec2f16"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.212341 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-combined-ca-bundle\") pod \"949d0877-d0ab-4330-8952-dd332aec2f16\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.212371 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-config-data\") pod \"949d0877-d0ab-4330-8952-dd332aec2f16\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.212407 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-ovndb-tls-certs\") pod \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\" (UID: \"14e3ee9e-71a8-4ebe-843a-a9b875995aea\") " Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.212446 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-scripts\") pod \"949d0877-d0ab-4330-8952-dd332aec2f16\" (UID: \"949d0877-d0ab-4330-8952-dd332aec2f16\") " Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.212830 4941 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/949d0877-d0ab-4330-8952-dd332aec2f16-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.221847 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "949d0877-d0ab-4330-8952-dd332aec2f16" (UID: "949d0877-d0ab-4330-8952-dd332aec2f16"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.221899 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-scripts" (OuterVolumeSpecName: "scripts") pod "949d0877-d0ab-4330-8952-dd332aec2f16" (UID: "949d0877-d0ab-4330-8952-dd332aec2f16"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.225655 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14e3ee9e-71a8-4ebe-843a-a9b875995aea-kube-api-access-xf82l" (OuterVolumeSpecName: "kube-api-access-xf82l") pod "14e3ee9e-71a8-4ebe-843a-a9b875995aea" (UID: "14e3ee9e-71a8-4ebe-843a-a9b875995aea"). InnerVolumeSpecName "kube-api-access-xf82l". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.225771 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/949d0877-d0ab-4330-8952-dd332aec2f16-kube-api-access-k2h7w" (OuterVolumeSpecName: "kube-api-access-k2h7w") pod "949d0877-d0ab-4330-8952-dd332aec2f16" (UID: "949d0877-d0ab-4330-8952-dd332aec2f16"). InnerVolumeSpecName "kube-api-access-k2h7w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.228639 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "14e3ee9e-71a8-4ebe-843a-a9b875995aea" (UID: "14e3ee9e-71a8-4ebe-843a-a9b875995aea"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.270786 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-config" (OuterVolumeSpecName: "config") pod "14e3ee9e-71a8-4ebe-843a-a9b875995aea" (UID: "14e3ee9e-71a8-4ebe-843a-a9b875995aea"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.271133 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "949d0877-d0ab-4330-8952-dd332aec2f16" (UID: "949d0877-d0ab-4330-8952-dd332aec2f16"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.275086 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14e3ee9e-71a8-4ebe-843a-a9b875995aea" (UID: "14e3ee9e-71a8-4ebe-843a-a9b875995aea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.316058 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xf82l\" (UniqueName: \"kubernetes.io/projected/14e3ee9e-71a8-4ebe-843a-a9b875995aea-kube-api-access-xf82l\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.316091 4941 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-httpd-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.316104 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2h7w\" (UniqueName: \"kubernetes.io/projected/949d0877-d0ab-4330-8952-dd332aec2f16-kube-api-access-k2h7w\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.316113 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.316121 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.316129 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.316136 4941 reconciler_common.go:293] "Volume detached for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.316145 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.321257 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "14e3ee9e-71a8-4ebe-843a-a9b875995aea" (UID: "14e3ee9e-71a8-4ebe-843a-a9b875995aea"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.360526 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-config-data" (OuterVolumeSpecName: "config-data") pod "949d0877-d0ab-4330-8952-dd332aec2f16" (UID: "949d0877-d0ab-4330-8952-dd332aec2f16"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.417682 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/949d0877-d0ab-4330-8952-dd332aec2f16-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:14 crc kubenswrapper[4941]: I1130 07:06:14.417729 4941 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14e3ee9e-71a8-4ebe-843a-a9b875995aea-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.030866 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-d79c95d68-mszfh" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.030909 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-d79c95d68-mszfh" event={"ID":"14e3ee9e-71a8-4ebe-843a-a9b875995aea","Type":"ContainerDied","Data":"b036ab08e61a7b3b38949fe7f60f1b7c6b71faee28523b50daa9365a68d18bb5"} Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.032195 4941 scope.go:117] "RemoveContainer" containerID="d3c6082ab51d39ca0408b622f5558557161f66113397c83e314cb554460603eb" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.039704 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"949d0877-d0ab-4330-8952-dd332aec2f16","Type":"ContainerDied","Data":"0b6271e8e14281725af466c28bf64807da12763aef14fae0bde28bd47e17ab6a"} Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.039759 4941 util.go:48] "No ready sandbox for pod can be found. 
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.086687 4941 scope.go:117] "RemoveContainer" containerID="0161e266452b785a55d6813c53f14f9ef6eec721c43285425bffea10ab4abfaa"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.100972 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.121218 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.127713 4941 scope.go:117] "RemoveContainer" containerID="7a2bc676d642fb0f884dd3d70f46790a87d8d01db1aa0b5d469551188b5dfef1"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.135126 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-d79c95d68-mszfh"]
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.157574 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-d79c95d68-mszfh"]
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.171801 4941 scope.go:117] "RemoveContainer" containerID="49e359a68c19b23d439dd0380ce6141b3fe695216416dfda1e629dce9b9c76b5"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.171968 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 30 07:06:15 crc kubenswrapper[4941]: E1130 07:06:15.172462 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="524a3388-a23b-4d06-a79c-56c5dfb3107d" containerName="dnsmasq-dns"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172479 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="524a3388-a23b-4d06-a79c-56c5dfb3107d" containerName="dnsmasq-dns"
Nov 30 07:06:15 crc kubenswrapper[4941]: E1130 07:06:15.172496 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14e3ee9e-71a8-4ebe-843a-a9b875995aea" containerName="neutron-api"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172503 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="14e3ee9e-71a8-4ebe-843a-a9b875995aea" containerName="neutron-api"
Nov 30 07:06:15 crc kubenswrapper[4941]: E1130 07:06:15.172519 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="949d0877-d0ab-4330-8952-dd332aec2f16" containerName="probe"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172525 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="949d0877-d0ab-4330-8952-dd332aec2f16" containerName="probe"
Nov 30 07:06:15 crc kubenswrapper[4941]: E1130 07:06:15.172534 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="524a3388-a23b-4d06-a79c-56c5dfb3107d" containerName="init"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172540 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="524a3388-a23b-4d06-a79c-56c5dfb3107d" containerName="init"
Nov 30 07:06:15 crc kubenswrapper[4941]: E1130 07:06:15.172559 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14e3ee9e-71a8-4ebe-843a-a9b875995aea" containerName="neutron-httpd"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172565 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="14e3ee9e-71a8-4ebe-843a-a9b875995aea" containerName="neutron-httpd"
Nov 30 07:06:15 crc kubenswrapper[4941]: E1130 07:06:15.172575 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84e0357d-8ac0-467b-883e-04e54d50de54" containerName="barbican-api-log"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172582 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="84e0357d-8ac0-467b-883e-04e54d50de54" containerName="barbican-api-log"
Nov 30 07:06:15 crc kubenswrapper[4941]: E1130 07:06:15.172601 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="949d0877-d0ab-4330-8952-dd332aec2f16" containerName="cinder-scheduler"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172607 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="949d0877-d0ab-4330-8952-dd332aec2f16" containerName="cinder-scheduler"
Nov 30 07:06:15 crc kubenswrapper[4941]: E1130 07:06:15.172620 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84e0357d-8ac0-467b-883e-04e54d50de54" containerName="barbican-api"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172626 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="84e0357d-8ac0-467b-883e-04e54d50de54" containerName="barbican-api"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172775 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="14e3ee9e-71a8-4ebe-843a-a9b875995aea" containerName="neutron-httpd"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172784 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="949d0877-d0ab-4330-8952-dd332aec2f16" containerName="cinder-scheduler"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172797 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="949d0877-d0ab-4330-8952-dd332aec2f16" containerName="probe"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172817 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="84e0357d-8ac0-467b-883e-04e54d50de54" containerName="barbican-api-log"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172825 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="524a3388-a23b-4d06-a79c-56c5dfb3107d" containerName="dnsmasq-dns"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172836 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="84e0357d-8ac0-467b-883e-04e54d50de54" containerName="barbican-api"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.172850 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="14e3ee9e-71a8-4ebe-843a-a9b875995aea" containerName="neutron-api"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.173769 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.178957 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.179643 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.234992 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.235066 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-scripts\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.235106 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr287\" (UniqueName: \"kubernetes.io/projected/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-kube-api-access-kr287\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.235169 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-config-data\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.235215 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.235276 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.347017 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-config-data\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.347132 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0"
Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.347095 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0"
"operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.347230 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.347273 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.347300 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-scripts\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.347348 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr287\" (UniqueName: \"kubernetes.io/projected/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-kube-api-access-kr287\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.351369 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.351581 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.351967 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-scripts\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.369981 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-config-data\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " pod="openstack/cinder-scheduler-0" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.373770 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr287\" (UniqueName: \"kubernetes.io/projected/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-kube-api-access-kr287\") pod \"cinder-scheduler-0\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " 
pod="openstack/cinder-scheduler-0" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.499909 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.533105 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14e3ee9e-71a8-4ebe-843a-a9b875995aea" path="/var/lib/kubelet/pods/14e3ee9e-71a8-4ebe-843a-a9b875995aea/volumes" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.534205 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="949d0877-d0ab-4330-8952-dd332aec2f16" path="/var/lib/kubelet/pods/949d0877-d0ab-4330-8952-dd332aec2f16/volumes" Nov 30 07:06:15 crc kubenswrapper[4941]: I1130 07:06:15.962804 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 07:06:16 crc kubenswrapper[4941]: I1130 07:06:16.062584 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a","Type":"ContainerStarted","Data":"ce0bfd1f85bac61182bbbc96627d218d0ad3739821fdb809fb08f1b7ad819d33"} Nov 30 07:06:17 crc kubenswrapper[4941]: I1130 07:06:17.074206 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a","Type":"ContainerStarted","Data":"a96ce6ca3db082f07bc30a85b2dd3ed276f669d63c74e2397c8b08ebfa4f983e"} Nov 30 07:06:17 crc kubenswrapper[4941]: I1130 07:06:17.564646 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:06:17 crc kubenswrapper[4941]: I1130 07:06:17.575426 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:06:18 crc kubenswrapper[4941]: I1130 07:06:18.086772 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a","Type":"ContainerStarted","Data":"901debcd5ec58b9fd628099c8ca6fab3d5432f5cf3c027e27b344f81cf8d7260"} Nov 30 07:06:18 crc kubenswrapper[4941]: I1130 07:06:18.120516 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.120499067 podStartE2EDuration="3.120499067s" podCreationTimestamp="2025-11-30 07:06:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:06:18.112095405 +0000 UTC m=+1198.880267014" watchObservedRunningTime="2025-11-30 07:06:18.120499067 +0000 UTC m=+1198.888670676" Nov 30 07:06:18 crc kubenswrapper[4941]: I1130 07:06:18.230652 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:06:20 crc kubenswrapper[4941]: I1130 07:06:20.500506 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.356268 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-7dc97f7589-7ghf8"] Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.362812 4941 util.go:30] "No sandbox for pod can be found. 
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.366169 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.366514 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.366816 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.380288 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7dc97f7589-7ghf8"]
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.382994 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmvxt\" (UniqueName: \"kubernetes.io/projected/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-kube-api-access-rmvxt\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.383041 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-etc-swift\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.383071 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-config-data\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.383090 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-public-tls-certs\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.383119 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-combined-ca-bundle\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.383232 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-log-httpd\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.383293 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-run-httpd\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8"
pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.383383 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-internal-tls-certs\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.485569 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-config-data\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.485614 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-public-tls-certs\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.485657 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-combined-ca-bundle\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.485717 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-log-httpd\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.485771 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-run-httpd\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.485809 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-internal-tls-certs\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.485875 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmvxt\" (UniqueName: \"kubernetes.io/projected/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-kube-api-access-rmvxt\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.485907 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-etc-swift\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " 
pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.487999 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-log-httpd\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.488261 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-run-httpd\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.496588 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-combined-ca-bundle\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.496617 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-etc-swift\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.498196 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-config-data\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.507447 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmvxt\" (UniqueName: \"kubernetes.io/projected/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-kube-api-access-rmvxt\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.509361 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-internal-tls-certs\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.512163 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-public-tls-certs\") pod \"swift-proxy-7dc97f7589-7ghf8\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.619570 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-j62c8"] Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.621077 4941 util.go:30] "No sandbox for pod can be found. 
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.632010 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-j62c8"]
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.680232 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-7dc97f7589-7ghf8"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.681055 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-wgg9g"]
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.682184 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-wgg9g"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.689655 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmdhm\" (UniqueName: \"kubernetes.io/projected/c5c83930-89f1-45d1-827f-1584ee8ce557-kube-api-access-qmdhm\") pod \"nova-api-db-create-j62c8\" (UID: \"c5c83930-89f1-45d1-827f-1584ee8ce557\") " pod="openstack/nova-api-db-create-j62c8"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.689714 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e76ecf6f-9843-4077-b8ff-602840dac5af-operator-scripts\") pod \"nova-cell0-db-create-wgg9g\" (UID: \"e76ecf6f-9843-4077-b8ff-602840dac5af\") " pod="openstack/nova-cell0-db-create-wgg9g"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.689764 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2np8h\" (UniqueName: \"kubernetes.io/projected/e76ecf6f-9843-4077-b8ff-602840dac5af-kube-api-access-2np8h\") pod \"nova-cell0-db-create-wgg9g\" (UID: \"e76ecf6f-9843-4077-b8ff-602840dac5af\") " pod="openstack/nova-cell0-db-create-wgg9g"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.689810 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5c83930-89f1-45d1-827f-1584ee8ce557-operator-scripts\") pod \"nova-api-db-create-j62c8\" (UID: \"c5c83930-89f1-45d1-827f-1584ee8ce557\") " pod="openstack/nova-api-db-create-j62c8"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.692511 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-wgg9g"]
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.777637 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-2ee8-account-create-update-56k92"]
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.779023 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2ee8-account-create-update-56k92"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.782666 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.792430 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5c83930-89f1-45d1-827f-1584ee8ce557-operator-scripts\") pod \"nova-api-db-create-j62c8\" (UID: \"c5c83930-89f1-45d1-827f-1584ee8ce557\") " pod="openstack/nova-api-db-create-j62c8"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.792535 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmdhm\" (UniqueName: \"kubernetes.io/projected/c5c83930-89f1-45d1-827f-1584ee8ce557-kube-api-access-qmdhm\") pod \"nova-api-db-create-j62c8\" (UID: \"c5c83930-89f1-45d1-827f-1584ee8ce557\") " pod="openstack/nova-api-db-create-j62c8"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.792586 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fed42e7e-ed1d-4463-8088-3d60e06dd00e-operator-scripts\") pod \"nova-api-2ee8-account-create-update-56k92\" (UID: \"fed42e7e-ed1d-4463-8088-3d60e06dd00e\") " pod="openstack/nova-api-2ee8-account-create-update-56k92"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.792612 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e76ecf6f-9843-4077-b8ff-602840dac5af-operator-scripts\") pod \"nova-cell0-db-create-wgg9g\" (UID: \"e76ecf6f-9843-4077-b8ff-602840dac5af\") " pod="openstack/nova-cell0-db-create-wgg9g"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.792653 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxjns\" (UniqueName: \"kubernetes.io/projected/fed42e7e-ed1d-4463-8088-3d60e06dd00e-kube-api-access-jxjns\") pod \"nova-api-2ee8-account-create-update-56k92\" (UID: \"fed42e7e-ed1d-4463-8088-3d60e06dd00e\") " pod="openstack/nova-api-2ee8-account-create-update-56k92"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.792700 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2np8h\" (UniqueName: \"kubernetes.io/projected/e76ecf6f-9843-4077-b8ff-602840dac5af-kube-api-access-2np8h\") pod \"nova-cell0-db-create-wgg9g\" (UID: \"e76ecf6f-9843-4077-b8ff-602840dac5af\") " pod="openstack/nova-cell0-db-create-wgg9g"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.793315 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5c83930-89f1-45d1-827f-1584ee8ce557-operator-scripts\") pod \"nova-api-db-create-j62c8\" (UID: \"c5c83930-89f1-45d1-827f-1584ee8ce557\") " pod="openstack/nova-api-db-create-j62c8"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.793711 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e76ecf6f-9843-4077-b8ff-602840dac5af-operator-scripts\") pod \"nova-cell0-db-create-wgg9g\" (UID: \"e76ecf6f-9843-4077-b8ff-602840dac5af\") " pod="openstack/nova-cell0-db-create-wgg9g"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.799538 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-2ee8-account-create-update-56k92"]
UPDATE" source="api" pods=["openstack/nova-api-2ee8-account-create-update-56k92"] Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.818102 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmdhm\" (UniqueName: \"kubernetes.io/projected/c5c83930-89f1-45d1-827f-1584ee8ce557-kube-api-access-qmdhm\") pod \"nova-api-db-create-j62c8\" (UID: \"c5c83930-89f1-45d1-827f-1584ee8ce557\") " pod="openstack/nova-api-db-create-j62c8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.821591 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2np8h\" (UniqueName: \"kubernetes.io/projected/e76ecf6f-9843-4077-b8ff-602840dac5af-kube-api-access-2np8h\") pod \"nova-cell0-db-create-wgg9g\" (UID: \"e76ecf6f-9843-4077-b8ff-602840dac5af\") " pod="openstack/nova-cell0-db-create-wgg9g" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.886881 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-4qtbv"] Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.888430 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-4qtbv" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.894166 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fed42e7e-ed1d-4463-8088-3d60e06dd00e-operator-scripts\") pod \"nova-api-2ee8-account-create-update-56k92\" (UID: \"fed42e7e-ed1d-4463-8088-3d60e06dd00e\") " pod="openstack/nova-api-2ee8-account-create-update-56k92" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.894914 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fed42e7e-ed1d-4463-8088-3d60e06dd00e-operator-scripts\") pod \"nova-api-2ee8-account-create-update-56k92\" (UID: \"fed42e7e-ed1d-4463-8088-3d60e06dd00e\") " pod="openstack/nova-api-2ee8-account-create-update-56k92" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.895006 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxjns\" (UniqueName: \"kubernetes.io/projected/fed42e7e-ed1d-4463-8088-3d60e06dd00e-kube-api-access-jxjns\") pod \"nova-api-2ee8-account-create-update-56k92\" (UID: \"fed42e7e-ed1d-4463-8088-3d60e06dd00e\") " pod="openstack/nova-api-2ee8-account-create-update-56k92" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.910471 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-4qtbv"] Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.910937 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxjns\" (UniqueName: \"kubernetes.io/projected/fed42e7e-ed1d-4463-8088-3d60e06dd00e-kube-api-access-jxjns\") pod \"nova-api-2ee8-account-create-update-56k92\" (UID: \"fed42e7e-ed1d-4463-8088-3d60e06dd00e\") " pod="openstack/nova-api-2ee8-account-create-update-56k92" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.940392 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-j62c8" Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.985979 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-0ef1-account-create-update-w7lxz"] Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.988377 4941 util.go:30] "No sandbox for pod can be found. 
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.991162 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.997311 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66dbaf24-8e9d-4981-a37c-561c8c7e98aa-operator-scripts\") pod \"nova-cell0-0ef1-account-create-update-w7lxz\" (UID: \"66dbaf24-8e9d-4981-a37c-561c8c7e98aa\") " pod="openstack/nova-cell0-0ef1-account-create-update-w7lxz"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.997365 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f11ab2-1455-484c-8d3d-a09bf34a6f72-operator-scripts\") pod \"nova-cell1-db-create-4qtbv\" (UID: \"a3f11ab2-1455-484c-8d3d-a09bf34a6f72\") " pod="openstack/nova-cell1-db-create-4qtbv"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.997465 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgw4t\" (UniqueName: \"kubernetes.io/projected/66dbaf24-8e9d-4981-a37c-561c8c7e98aa-kube-api-access-mgw4t\") pod \"nova-cell0-0ef1-account-create-update-w7lxz\" (UID: \"66dbaf24-8e9d-4981-a37c-561c8c7e98aa\") " pod="openstack/nova-cell0-0ef1-account-create-update-w7lxz"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.997512 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bm2kh\" (UniqueName: \"kubernetes.io/projected/a3f11ab2-1455-484c-8d3d-a09bf34a6f72-kube-api-access-bm2kh\") pod \"nova-cell1-db-create-4qtbv\" (UID: \"a3f11ab2-1455-484c-8d3d-a09bf34a6f72\") " pod="openstack/nova-cell1-db-create-4qtbv"
Nov 30 07:06:22 crc kubenswrapper[4941]: I1130 07:06:22.999799 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-wgg9g"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.006000 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0ef1-account-create-update-w7lxz"]
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.099203 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bm2kh\" (UniqueName: \"kubernetes.io/projected/a3f11ab2-1455-484c-8d3d-a09bf34a6f72-kube-api-access-bm2kh\") pod \"nova-cell1-db-create-4qtbv\" (UID: \"a3f11ab2-1455-484c-8d3d-a09bf34a6f72\") " pod="openstack/nova-cell1-db-create-4qtbv"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.099292 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66dbaf24-8e9d-4981-a37c-561c8c7e98aa-operator-scripts\") pod \"nova-cell0-0ef1-account-create-update-w7lxz\" (UID: \"66dbaf24-8e9d-4981-a37c-561c8c7e98aa\") " pod="openstack/nova-cell0-0ef1-account-create-update-w7lxz"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.099331 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f11ab2-1455-484c-8d3d-a09bf34a6f72-operator-scripts\") pod \"nova-cell1-db-create-4qtbv\" (UID: \"a3f11ab2-1455-484c-8d3d-a09bf34a6f72\") " pod="openstack/nova-cell1-db-create-4qtbv"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.099419 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgw4t\" (UniqueName: \"kubernetes.io/projected/66dbaf24-8e9d-4981-a37c-561c8c7e98aa-kube-api-access-mgw4t\") pod \"nova-cell0-0ef1-account-create-update-w7lxz\" (UID: \"66dbaf24-8e9d-4981-a37c-561c8c7e98aa\") " pod="openstack/nova-cell0-0ef1-account-create-update-w7lxz"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.100868 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66dbaf24-8e9d-4981-a37c-561c8c7e98aa-operator-scripts\") pod \"nova-cell0-0ef1-account-create-update-w7lxz\" (UID: \"66dbaf24-8e9d-4981-a37c-561c8c7e98aa\") " pod="openstack/nova-cell0-0ef1-account-create-update-w7lxz"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.101071 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f11ab2-1455-484c-8d3d-a09bf34a6f72-operator-scripts\") pod \"nova-cell1-db-create-4qtbv\" (UID: \"a3f11ab2-1455-484c-8d3d-a09bf34a6f72\") " pod="openstack/nova-cell1-db-create-4qtbv"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.121549 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgw4t\" (UniqueName: \"kubernetes.io/projected/66dbaf24-8e9d-4981-a37c-561c8c7e98aa-kube-api-access-mgw4t\") pod \"nova-cell0-0ef1-account-create-update-w7lxz\" (UID: \"66dbaf24-8e9d-4981-a37c-561c8c7e98aa\") " pod="openstack/nova-cell0-0ef1-account-create-update-w7lxz"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.121694 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bm2kh\" (UniqueName: \"kubernetes.io/projected/a3f11ab2-1455-484c-8d3d-a09bf34a6f72-kube-api-access-bm2kh\") pod \"nova-cell1-db-create-4qtbv\" (UID: \"a3f11ab2-1455-484c-8d3d-a09bf34a6f72\") " pod="openstack/nova-cell1-db-create-4qtbv"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.165359 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2ee8-account-create-update-56k92"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.195679 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-77d0-account-create-update-25gtl"]
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.197145 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-77d0-account-create-update-25gtl"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.199237 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.209811 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-77d0-account-create-update-25gtl"]
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.216961 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-4qtbv"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.303738 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g6gj\" (UniqueName: \"kubernetes.io/projected/ddbc8aeb-8359-4427-843f-c6e2377e2857-kube-api-access-7g6gj\") pod \"nova-cell1-77d0-account-create-update-25gtl\" (UID: \"ddbc8aeb-8359-4427-843f-c6e2377e2857\") " pod="openstack/nova-cell1-77d0-account-create-update-25gtl"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.303797 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddbc8aeb-8359-4427-843f-c6e2377e2857-operator-scripts\") pod \"nova-cell1-77d0-account-create-update-25gtl\" (UID: \"ddbc8aeb-8359-4427-843f-c6e2377e2857\") " pod="openstack/nova-cell1-77d0-account-create-update-25gtl"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.305139 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0ef1-account-create-update-w7lxz"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.305981 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7dc97f7589-7ghf8"]
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.383685 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.385278 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.392899 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.393134 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.393696 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-zl67z"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.407738 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjc2d\" (UniqueName: \"kubernetes.io/projected/5679f4ed-6882-4f85-93b2-02ccff357b48-kube-api-access-hjc2d\") pod \"openstackclient\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " pod="openstack/openstackclient"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.407785 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5679f4ed-6882-4f85-93b2-02ccff357b48-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " pod="openstack/openstackclient"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.407853 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g6gj\" (UniqueName: \"kubernetes.io/projected/ddbc8aeb-8359-4427-843f-c6e2377e2857-kube-api-access-7g6gj\") pod \"nova-cell1-77d0-account-create-update-25gtl\" (UID: \"ddbc8aeb-8359-4427-843f-c6e2377e2857\") " pod="openstack/nova-cell1-77d0-account-create-update-25gtl"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.407884 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddbc8aeb-8359-4427-843f-c6e2377e2857-operator-scripts\") pod \"nova-cell1-77d0-account-create-update-25gtl\" (UID: \"ddbc8aeb-8359-4427-843f-c6e2377e2857\") " pod="openstack/nova-cell1-77d0-account-create-update-25gtl"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.409556 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5679f4ed-6882-4f85-93b2-02ccff357b48-openstack-config-secret\") pod \"openstackclient\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " pod="openstack/openstackclient"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.409619 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5679f4ed-6882-4f85-93b2-02ccff357b48-openstack-config\") pod \"openstackclient\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " pod="openstack/openstackclient"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.410425 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddbc8aeb-8359-4427-843f-c6e2377e2857-operator-scripts\") pod \"nova-cell1-77d0-account-create-update-25gtl\" (UID: \"ddbc8aeb-8359-4427-843f-c6e2377e2857\") " pod="openstack/nova-cell1-77d0-account-create-update-25gtl"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.424430 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
pods=["openstack/openstackclient"] Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.434010 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g6gj\" (UniqueName: \"kubernetes.io/projected/ddbc8aeb-8359-4427-843f-c6e2377e2857-kube-api-access-7g6gj\") pod \"nova-cell1-77d0-account-create-update-25gtl\" (UID: \"ddbc8aeb-8359-4427-843f-c6e2377e2857\") " pod="openstack/nova-cell1-77d0-account-create-update-25gtl" Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.514531 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5679f4ed-6882-4f85-93b2-02ccff357b48-openstack-config-secret\") pod \"openstackclient\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " pod="openstack/openstackclient" Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.514912 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5679f4ed-6882-4f85-93b2-02ccff357b48-openstack-config\") pod \"openstackclient\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " pod="openstack/openstackclient" Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.514979 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjc2d\" (UniqueName: \"kubernetes.io/projected/5679f4ed-6882-4f85-93b2-02ccff357b48-kube-api-access-hjc2d\") pod \"openstackclient\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " pod="openstack/openstackclient" Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.515001 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5679f4ed-6882-4f85-93b2-02ccff357b48-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " pod="openstack/openstackclient" Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.516099 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5679f4ed-6882-4f85-93b2-02ccff357b48-openstack-config\") pod \"openstackclient\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " pod="openstack/openstackclient" Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.516722 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-wgg9g"] Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.519871 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5679f4ed-6882-4f85-93b2-02ccff357b48-combined-ca-bundle\") pod \"openstackclient\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " pod="openstack/openstackclient" Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.520008 4941 util.go:30] "No sandbox for pod can be found. 
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.524760 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5679f4ed-6882-4f85-93b2-02ccff357b48-openstack-config-secret\") pod \"openstackclient\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " pod="openstack/openstackclient"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.533551 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjc2d\" (UniqueName: \"kubernetes.io/projected/5679f4ed-6882-4f85-93b2-02ccff357b48-kube-api-access-hjc2d\") pod \"openstackclient\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " pod="openstack/openstackclient"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.656880 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-j62c8"]
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.724662 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.747953 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.748296 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="ceilometer-central-agent" containerID="cri-o://d2081b880a27904af1223737519422538d6db133f4caa53fe25b036dc35f6cb4" gracePeriod=30
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.748986 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="proxy-httpd" containerID="cri-o://c0e61a8d6e8681fca905ebcb523b95149808986e1480520d3172dd03109c9c5b" gracePeriod=30
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.749058 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="sg-core" containerID="cri-o://d665c69b49cc8c4f3a5b3024b78164a30243d365fd7ef133b4450db4a1744d30" gracePeriod=30
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.749134 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="ceilometer-notification-agent" containerID="cri-o://2b35414a0896dbaa38ec03ad6db919f7a32c9b9d5f88db656e884592de8bd62d" gracePeriod=30
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.764344 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.159:3000/\": EOF"
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.775630 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-2ee8-account-create-update-56k92"]
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.848084 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-4qtbv"]
Nov 30 07:06:23 crc kubenswrapper[4941]: I1130 07:06:23.982624 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0ef1-account-create-update-w7lxz"]
Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.118968 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-77d0-account-create-update-25gtl"]
I1130 07:06:24.118968 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-77d0-account-create-update-25gtl"] Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.169105 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-j62c8" event={"ID":"c5c83930-89f1-45d1-827f-1584ee8ce557","Type":"ContainerStarted","Data":"fc82cd08bae8260c63d177cb520a3ad485ad5481cded0679001673d3f6da3c35"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.169157 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-j62c8" event={"ID":"c5c83930-89f1-45d1-827f-1584ee8ce557","Type":"ContainerStarted","Data":"4502c61dc38e7b051e14649fd3898e023236b6c88a046e42d18cd0a6f72d6b8d"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.175922 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-4qtbv" event={"ID":"a3f11ab2-1455-484c-8d3d-a09bf34a6f72","Type":"ContainerStarted","Data":"a89fcf084f19b692e7cb17f1e0ea67edc9149cb997bef9b438b46193fdbaca60"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.175951 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-4qtbv" event={"ID":"a3f11ab2-1455-484c-8d3d-a09bf34a6f72","Type":"ContainerStarted","Data":"5feed6eb2cb532f2c54ced833758c2e5fa1a92cfe4c4490ed9f05975bb51578c"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.197583 4941 generic.go:334] "Generic (PLEG): container finished" podID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerID="c0e61a8d6e8681fca905ebcb523b95149808986e1480520d3172dd03109c9c5b" exitCode=0 Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.197626 4941 generic.go:334] "Generic (PLEG): container finished" podID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerID="d665c69b49cc8c4f3a5b3024b78164a30243d365fd7ef133b4450db4a1744d30" exitCode=2 Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.197703 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab604dfb-8fe4-4e98-8652-a98369b5d260","Type":"ContainerDied","Data":"c0e61a8d6e8681fca905ebcb523b95149808986e1480520d3172dd03109c9c5b"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.197736 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab604dfb-8fe4-4e98-8652-a98369b5d260","Type":"ContainerDied","Data":"d665c69b49cc8c4f3a5b3024b78164a30243d365fd7ef133b4450db4a1744d30"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.210032 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7dc97f7589-7ghf8" event={"ID":"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b","Type":"ContainerStarted","Data":"1b70a2a767c9e03c6d699f586af8bd64fc30e67891d157e6de0aa7801367dc48"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.210081 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7dc97f7589-7ghf8" event={"ID":"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b","Type":"ContainerStarted","Data":"14ec24140a57aaa464ef55ac41b92f4dade8d46043b44e8eacba548db02e51ba"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.213588 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0ef1-account-create-update-w7lxz" event={"ID":"66dbaf24-8e9d-4981-a37c-561c8c7e98aa","Type":"ContainerStarted","Data":"2b5e93b4ec2b43ad9b7911aef94749fc8f14d340068b4603dda766c05c5d2c42"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.218586 4941 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/nova-cell1-db-create-4qtbv" podStartSLOduration=2.218566388 podStartE2EDuration="2.218566388s" podCreationTimestamp="2025-11-30 07:06:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:06:24.198320017 +0000 UTC m=+1204.966491626" watchObservedRunningTime="2025-11-30 07:06:24.218566388 +0000 UTC m=+1204.986737997" Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.222983 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2ee8-account-create-update-56k92" event={"ID":"fed42e7e-ed1d-4463-8088-3d60e06dd00e","Type":"ContainerStarted","Data":"f07d4fed2519239f0e4fb63e2104fd272dece2ded2e1def806ae733a19ed8d77"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.223024 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2ee8-account-create-update-56k92" event={"ID":"fed42e7e-ed1d-4463-8088-3d60e06dd00e","Type":"ContainerStarted","Data":"959571513a56052738045d81f092b8e4cb9be22319648c2cb7305327ede1a54e"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.230370 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wgg9g" event={"ID":"e76ecf6f-9843-4077-b8ff-602840dac5af","Type":"ContainerStarted","Data":"8341e11286820a4a80b6cab481b2607c6de18d38c5dd607d7d9d36f09e505c21"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.230406 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wgg9g" event={"ID":"e76ecf6f-9843-4077-b8ff-602840dac5af","Type":"ContainerStarted","Data":"39cddfbdac82b3d27cff9f33d96a5f1cbd4172e6c25284119e394c097c97cbdd"} Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.263257 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-2ee8-account-create-update-56k92" podStartSLOduration=2.263236691 podStartE2EDuration="2.263236691s" podCreationTimestamp="2025-11-30 07:06:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:06:24.244032772 +0000 UTC m=+1205.012204381" watchObservedRunningTime="2025-11-30 07:06:24.263236691 +0000 UTC m=+1205.031408300" Nov 30 07:06:24 crc kubenswrapper[4941]: I1130 07:06:24.312629 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.246636 4941 generic.go:334] "Generic (PLEG): container finished" podID="66dbaf24-8e9d-4981-a37c-561c8c7e98aa" containerID="a450fc13d9cba17de5b9a5145a69dd69674600063f71ac28c96e01143111704a" exitCode=0 Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.246825 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0ef1-account-create-update-w7lxz" event={"ID":"66dbaf24-8e9d-4981-a37c-561c8c7e98aa","Type":"ContainerDied","Data":"a450fc13d9cba17de5b9a5145a69dd69674600063f71ac28c96e01143111704a"} Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.247982 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"5679f4ed-6882-4f85-93b2-02ccff357b48","Type":"ContainerStarted","Data":"12cacfb90b9efaedde16dd2d07674b00016d09e7d39e53b8e57af36e0dd0e74c"} Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.274151 4941 generic.go:334] "Generic (PLEG): container finished" podID="e76ecf6f-9843-4077-b8ff-602840dac5af" 
containerID="8341e11286820a4a80b6cab481b2607c6de18d38c5dd607d7d9d36f09e505c21" exitCode=0 Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.274258 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wgg9g" event={"ID":"e76ecf6f-9843-4077-b8ff-602840dac5af","Type":"ContainerDied","Data":"8341e11286820a4a80b6cab481b2607c6de18d38c5dd607d7d9d36f09e505c21"} Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.293677 4941 generic.go:334] "Generic (PLEG): container finished" podID="a3f11ab2-1455-484c-8d3d-a09bf34a6f72" containerID="a89fcf084f19b692e7cb17f1e0ea67edc9149cb997bef9b438b46193fdbaca60" exitCode=0 Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.293770 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-4qtbv" event={"ID":"a3f11ab2-1455-484c-8d3d-a09bf34a6f72","Type":"ContainerDied","Data":"a89fcf084f19b692e7cb17f1e0ea67edc9149cb997bef9b438b46193fdbaca60"} Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.340624 4941 generic.go:334] "Generic (PLEG): container finished" podID="ddbc8aeb-8359-4427-843f-c6e2377e2857" containerID="a3e75aa9880eaa34a2042e2ff3704bcab240a15c359e560eb07f1dbc92fb900d" exitCode=0 Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.341197 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-77d0-account-create-update-25gtl" event={"ID":"ddbc8aeb-8359-4427-843f-c6e2377e2857","Type":"ContainerDied","Data":"a3e75aa9880eaa34a2042e2ff3704bcab240a15c359e560eb07f1dbc92fb900d"} Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.341226 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-77d0-account-create-update-25gtl" event={"ID":"ddbc8aeb-8359-4427-843f-c6e2377e2857","Type":"ContainerStarted","Data":"34665f41922f3fd9d687c9f8daa2df914313ffa9aceb7e3c322e7d30bfc86ba0"} Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.401524 4941 generic.go:334] "Generic (PLEG): container finished" podID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerID="d2081b880a27904af1223737519422538d6db133f4caa53fe25b036dc35f6cb4" exitCode=0 Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.401607 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab604dfb-8fe4-4e98-8652-a98369b5d260","Type":"ContainerDied","Data":"d2081b880a27904af1223737519422538d6db133f4caa53fe25b036dc35f6cb4"} Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.415023 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7dc97f7589-7ghf8" event={"ID":"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b","Type":"ContainerStarted","Data":"f817ede79334101b289bba0c90e92e4faabbe95f8192168498ec3ad0a4f103ef"} Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.416130 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.417907 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.423621 4941 generic.go:334] "Generic (PLEG): container finished" podID="c5c83930-89f1-45d1-827f-1584ee8ce557" containerID="fc82cd08bae8260c63d177cb520a3ad485ad5481cded0679001673d3f6da3c35" exitCode=0 Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.423696 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-j62c8" 
event={"ID":"c5c83930-89f1-45d1-827f-1584ee8ce557","Type":"ContainerDied","Data":"fc82cd08bae8260c63d177cb520a3ad485ad5481cded0679001673d3f6da3c35"} Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.427453 4941 generic.go:334] "Generic (PLEG): container finished" podID="fed42e7e-ed1d-4463-8088-3d60e06dd00e" containerID="f07d4fed2519239f0e4fb63e2104fd272dece2ded2e1def806ae733a19ed8d77" exitCode=0 Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.427510 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2ee8-account-create-update-56k92" event={"ID":"fed42e7e-ed1d-4463-8088-3d60e06dd00e","Type":"ContainerDied","Data":"f07d4fed2519239f0e4fb63e2104fd272dece2ded2e1def806ae733a19ed8d77"} Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.467647 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-7dc97f7589-7ghf8" podStartSLOduration=3.467628547 podStartE2EDuration="3.467628547s" podCreationTimestamp="2025-11-30 07:06:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:06:25.455689854 +0000 UTC m=+1206.223861463" watchObservedRunningTime="2025-11-30 07:06:25.467628547 +0000 UTC m=+1206.235800156" Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.765710 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.915724 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-j62c8" Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.926119 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-wgg9g" Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.976036 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e76ecf6f-9843-4077-b8ff-602840dac5af-operator-scripts\") pod \"e76ecf6f-9843-4077-b8ff-602840dac5af\" (UID: \"e76ecf6f-9843-4077-b8ff-602840dac5af\") " Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.976147 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5c83930-89f1-45d1-827f-1584ee8ce557-operator-scripts\") pod \"c5c83930-89f1-45d1-827f-1584ee8ce557\" (UID: \"c5c83930-89f1-45d1-827f-1584ee8ce557\") " Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.976263 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmdhm\" (UniqueName: \"kubernetes.io/projected/c5c83930-89f1-45d1-827f-1584ee8ce557-kube-api-access-qmdhm\") pod \"c5c83930-89f1-45d1-827f-1584ee8ce557\" (UID: \"c5c83930-89f1-45d1-827f-1584ee8ce557\") " Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.976342 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2np8h\" (UniqueName: \"kubernetes.io/projected/e76ecf6f-9843-4077-b8ff-602840dac5af-kube-api-access-2np8h\") pod \"e76ecf6f-9843-4077-b8ff-602840dac5af\" (UID: \"e76ecf6f-9843-4077-b8ff-602840dac5af\") " Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.977766 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e76ecf6f-9843-4077-b8ff-602840dac5af-operator-scripts" (OuterVolumeSpecName: 
"operator-scripts") pod "e76ecf6f-9843-4077-b8ff-602840dac5af" (UID: "e76ecf6f-9843-4077-b8ff-602840dac5af"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.980950 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5c83930-89f1-45d1-827f-1584ee8ce557-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c5c83930-89f1-45d1-827f-1584ee8ce557" (UID: "c5c83930-89f1-45d1-827f-1584ee8ce557"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.985994 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5c83930-89f1-45d1-827f-1584ee8ce557-kube-api-access-qmdhm" (OuterVolumeSpecName: "kube-api-access-qmdhm") pod "c5c83930-89f1-45d1-827f-1584ee8ce557" (UID: "c5c83930-89f1-45d1-827f-1584ee8ce557"). InnerVolumeSpecName "kube-api-access-qmdhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:25 crc kubenswrapper[4941]: I1130 07:06:25.989511 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e76ecf6f-9843-4077-b8ff-602840dac5af-kube-api-access-2np8h" (OuterVolumeSpecName: "kube-api-access-2np8h") pod "e76ecf6f-9843-4077-b8ff-602840dac5af" (UID: "e76ecf6f-9843-4077-b8ff-602840dac5af"). InnerVolumeSpecName "kube-api-access-2np8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.078309 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5c83930-89f1-45d1-827f-1584ee8ce557-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.078367 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmdhm\" (UniqueName: \"kubernetes.io/projected/c5c83930-89f1-45d1-827f-1584ee8ce557-kube-api-access-qmdhm\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.078381 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2np8h\" (UniqueName: \"kubernetes.io/projected/e76ecf6f-9843-4077-b8ff-602840dac5af-kube-api-access-2np8h\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.078390 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e76ecf6f-9843-4077-b8ff-602840dac5af-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.436544 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-j62c8" event={"ID":"c5c83930-89f1-45d1-827f-1584ee8ce557","Type":"ContainerDied","Data":"4502c61dc38e7b051e14649fd3898e023236b6c88a046e42d18cd0a6f72d6b8d"} Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.436583 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4502c61dc38e7b051e14649fd3898e023236b6c88a046e42d18cd0a6f72d6b8d" Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.436642 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-j62c8" Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.439500 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-wgg9g" Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.441525 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wgg9g" event={"ID":"e76ecf6f-9843-4077-b8ff-602840dac5af","Type":"ContainerDied","Data":"39cddfbdac82b3d27cff9f33d96a5f1cbd4172e6c25284119e394c097c97cbdd"} Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.441560 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39cddfbdac82b3d27cff9f33d96a5f1cbd4172e6c25284119e394c097c97cbdd" Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.862251 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2ee8-account-create-update-56k92" Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.903295 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fed42e7e-ed1d-4463-8088-3d60e06dd00e-operator-scripts\") pod \"fed42e7e-ed1d-4463-8088-3d60e06dd00e\" (UID: \"fed42e7e-ed1d-4463-8088-3d60e06dd00e\") " Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.903460 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxjns\" (UniqueName: \"kubernetes.io/projected/fed42e7e-ed1d-4463-8088-3d60e06dd00e-kube-api-access-jxjns\") pod \"fed42e7e-ed1d-4463-8088-3d60e06dd00e\" (UID: \"fed42e7e-ed1d-4463-8088-3d60e06dd00e\") " Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.903834 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fed42e7e-ed1d-4463-8088-3d60e06dd00e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fed42e7e-ed1d-4463-8088-3d60e06dd00e" (UID: "fed42e7e-ed1d-4463-8088-3d60e06dd00e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.904361 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fed42e7e-ed1d-4463-8088-3d60e06dd00e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:26 crc kubenswrapper[4941]: I1130 07:06:26.909062 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fed42e7e-ed1d-4463-8088-3d60e06dd00e-kube-api-access-jxjns" (OuterVolumeSpecName: "kube-api-access-jxjns") pod "fed42e7e-ed1d-4463-8088-3d60e06dd00e" (UID: "fed42e7e-ed1d-4463-8088-3d60e06dd00e"). InnerVolumeSpecName "kube-api-access-jxjns". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.005909 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxjns\" (UniqueName: \"kubernetes.io/projected/fed42e7e-ed1d-4463-8088-3d60e06dd00e-kube-api-access-jxjns\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.077130 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-77d0-account-create-update-25gtl" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.079427 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0ef1-account-create-update-w7lxz" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.089107 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-4qtbv" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.107345 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f11ab2-1455-484c-8d3d-a09bf34a6f72-operator-scripts\") pod \"a3f11ab2-1455-484c-8d3d-a09bf34a6f72\" (UID: \"a3f11ab2-1455-484c-8d3d-a09bf34a6f72\") " Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.107435 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bm2kh\" (UniqueName: \"kubernetes.io/projected/a3f11ab2-1455-484c-8d3d-a09bf34a6f72-kube-api-access-bm2kh\") pod \"a3f11ab2-1455-484c-8d3d-a09bf34a6f72\" (UID: \"a3f11ab2-1455-484c-8d3d-a09bf34a6f72\") " Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.107511 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddbc8aeb-8359-4427-843f-c6e2377e2857-operator-scripts\") pod \"ddbc8aeb-8359-4427-843f-c6e2377e2857\" (UID: \"ddbc8aeb-8359-4427-843f-c6e2377e2857\") " Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.107606 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66dbaf24-8e9d-4981-a37c-561c8c7e98aa-operator-scripts\") pod \"66dbaf24-8e9d-4981-a37c-561c8c7e98aa\" (UID: \"66dbaf24-8e9d-4981-a37c-561c8c7e98aa\") " Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.107681 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7g6gj\" (UniqueName: \"kubernetes.io/projected/ddbc8aeb-8359-4427-843f-c6e2377e2857-kube-api-access-7g6gj\") pod \"ddbc8aeb-8359-4427-843f-c6e2377e2857\" (UID: \"ddbc8aeb-8359-4427-843f-c6e2377e2857\") " Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.107709 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgw4t\" (UniqueName: \"kubernetes.io/projected/66dbaf24-8e9d-4981-a37c-561c8c7e98aa-kube-api-access-mgw4t\") pod \"66dbaf24-8e9d-4981-a37c-561c8c7e98aa\" (UID: \"66dbaf24-8e9d-4981-a37c-561c8c7e98aa\") " Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.107928 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3f11ab2-1455-484c-8d3d-a09bf34a6f72-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a3f11ab2-1455-484c-8d3d-a09bf34a6f72" (UID: "a3f11ab2-1455-484c-8d3d-a09bf34a6f72"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.108371 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddbc8aeb-8359-4427-843f-c6e2377e2857-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ddbc8aeb-8359-4427-843f-c6e2377e2857" (UID: "ddbc8aeb-8359-4427-843f-c6e2377e2857"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.108380 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3f11ab2-1455-484c-8d3d-a09bf34a6f72-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.109522 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66dbaf24-8e9d-4981-a37c-561c8c7e98aa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "66dbaf24-8e9d-4981-a37c-561c8c7e98aa" (UID: "66dbaf24-8e9d-4981-a37c-561c8c7e98aa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.116488 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddbc8aeb-8359-4427-843f-c6e2377e2857-kube-api-access-7g6gj" (OuterVolumeSpecName: "kube-api-access-7g6gj") pod "ddbc8aeb-8359-4427-843f-c6e2377e2857" (UID: "ddbc8aeb-8359-4427-843f-c6e2377e2857"). InnerVolumeSpecName "kube-api-access-7g6gj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.116642 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66dbaf24-8e9d-4981-a37c-561c8c7e98aa-kube-api-access-mgw4t" (OuterVolumeSpecName: "kube-api-access-mgw4t") pod "66dbaf24-8e9d-4981-a37c-561c8c7e98aa" (UID: "66dbaf24-8e9d-4981-a37c-561c8c7e98aa"). InnerVolumeSpecName "kube-api-access-mgw4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.116670 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3f11ab2-1455-484c-8d3d-a09bf34a6f72-kube-api-access-bm2kh" (OuterVolumeSpecName: "kube-api-access-bm2kh") pod "a3f11ab2-1455-484c-8d3d-a09bf34a6f72" (UID: "a3f11ab2-1455-484c-8d3d-a09bf34a6f72"). InnerVolumeSpecName "kube-api-access-bm2kh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.212221 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bm2kh\" (UniqueName: \"kubernetes.io/projected/a3f11ab2-1455-484c-8d3d-a09bf34a6f72-kube-api-access-bm2kh\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.212256 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddbc8aeb-8359-4427-843f-c6e2377e2857-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.212266 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66dbaf24-8e9d-4981-a37c-561c8c7e98aa-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.212274 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7g6gj\" (UniqueName: \"kubernetes.io/projected/ddbc8aeb-8359-4427-843f-c6e2377e2857-kube-api-access-7g6gj\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.212283 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgw4t\" (UniqueName: \"kubernetes.io/projected/66dbaf24-8e9d-4981-a37c-561c8c7e98aa-kube-api-access-mgw4t\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.456144 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0ef1-account-create-update-w7lxz" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.456163 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0ef1-account-create-update-w7lxz" event={"ID":"66dbaf24-8e9d-4981-a37c-561c8c7e98aa","Type":"ContainerDied","Data":"2b5e93b4ec2b43ad9b7911aef94749fc8f14d340068b4603dda766c05c5d2c42"} Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.456209 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b5e93b4ec2b43ad9b7911aef94749fc8f14d340068b4603dda766c05c5d2c42" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.459042 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-2ee8-account-create-update-56k92" event={"ID":"fed42e7e-ed1d-4463-8088-3d60e06dd00e","Type":"ContainerDied","Data":"959571513a56052738045d81f092b8e4cb9be22319648c2cb7305327ede1a54e"} Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.459070 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="959571513a56052738045d81f092b8e4cb9be22319648c2cb7305327ede1a54e" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.459084 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-2ee8-account-create-update-56k92" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.460515 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-4qtbv" event={"ID":"a3f11ab2-1455-484c-8d3d-a09bf34a6f72","Type":"ContainerDied","Data":"5feed6eb2cb532f2c54ced833758c2e5fa1a92cfe4c4490ed9f05975bb51578c"} Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.460538 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-4qtbv" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.460546 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5feed6eb2cb532f2c54ced833758c2e5fa1a92cfe4c4490ed9f05975bb51578c" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.461940 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-77d0-account-create-update-25gtl" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.461938 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-77d0-account-create-update-25gtl" event={"ID":"ddbc8aeb-8359-4427-843f-c6e2377e2857","Type":"ContainerDied","Data":"34665f41922f3fd9d687c9f8daa2df914313ffa9aceb7e3c322e7d30bfc86ba0"} Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.461978 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34665f41922f3fd9d687c9f8daa2df914313ffa9aceb7e3c322e7d30bfc86ba0" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.466141 4941 generic.go:334] "Generic (PLEG): container finished" podID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerID="2b35414a0896dbaa38ec03ad6db919f7a32c9b9d5f88db656e884592de8bd62d" exitCode=0 Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.467364 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab604dfb-8fe4-4e98-8652-a98369b5d260","Type":"ContainerDied","Data":"2b35414a0896dbaa38ec03ad6db919f7a32c9b9d5f88db656e884592de8bd62d"} Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.568747 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.619967 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-scripts\") pod \"ab604dfb-8fe4-4e98-8652-a98369b5d260\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.620017 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-sg-core-conf-yaml\") pod \"ab604dfb-8fe4-4e98-8652-a98369b5d260\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.620124 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5p87\" (UniqueName: \"kubernetes.io/projected/ab604dfb-8fe4-4e98-8652-a98369b5d260-kube-api-access-j5p87\") pod \"ab604dfb-8fe4-4e98-8652-a98369b5d260\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.620221 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-combined-ca-bundle\") pod \"ab604dfb-8fe4-4e98-8652-a98369b5d260\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.620313 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab604dfb-8fe4-4e98-8652-a98369b5d260-log-httpd\") pod \"ab604dfb-8fe4-4e98-8652-a98369b5d260\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " Nov 30 07:06:27 crc 
kubenswrapper[4941]: I1130 07:06:27.620381 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-config-data\") pod \"ab604dfb-8fe4-4e98-8652-a98369b5d260\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.620456 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab604dfb-8fe4-4e98-8652-a98369b5d260-run-httpd\") pod \"ab604dfb-8fe4-4e98-8652-a98369b5d260\" (UID: \"ab604dfb-8fe4-4e98-8652-a98369b5d260\") " Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.621124 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab604dfb-8fe4-4e98-8652-a98369b5d260-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ab604dfb-8fe4-4e98-8652-a98369b5d260" (UID: "ab604dfb-8fe4-4e98-8652-a98369b5d260"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.621261 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab604dfb-8fe4-4e98-8652-a98369b5d260-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ab604dfb-8fe4-4e98-8652-a98369b5d260" (UID: "ab604dfb-8fe4-4e98-8652-a98369b5d260"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.625380 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab604dfb-8fe4-4e98-8652-a98369b5d260-kube-api-access-j5p87" (OuterVolumeSpecName: "kube-api-access-j5p87") pod "ab604dfb-8fe4-4e98-8652-a98369b5d260" (UID: "ab604dfb-8fe4-4e98-8652-a98369b5d260"). InnerVolumeSpecName "kube-api-access-j5p87". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.625810 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-scripts" (OuterVolumeSpecName: "scripts") pod "ab604dfb-8fe4-4e98-8652-a98369b5d260" (UID: "ab604dfb-8fe4-4e98-8652-a98369b5d260"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.652863 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ab604dfb-8fe4-4e98-8652-a98369b5d260" (UID: "ab604dfb-8fe4-4e98-8652-a98369b5d260"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.695862 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab604dfb-8fe4-4e98-8652-a98369b5d260" (UID: "ab604dfb-8fe4-4e98-8652-a98369b5d260"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.721932 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5p87\" (UniqueName: \"kubernetes.io/projected/ab604dfb-8fe4-4e98-8652-a98369b5d260-kube-api-access-j5p87\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.722097 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.722114 4941 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab604dfb-8fe4-4e98-8652-a98369b5d260-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.722122 4941 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab604dfb-8fe4-4e98-8652-a98369b5d260-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.722130 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.722162 4941 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.725602 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-config-data" (OuterVolumeSpecName: "config-data") pod "ab604dfb-8fe4-4e98-8652-a98369b5d260" (UID: "ab604dfb-8fe4-4e98-8652-a98369b5d260"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:27 crc kubenswrapper[4941]: I1130 07:06:27.823849 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab604dfb-8fe4-4e98-8652-a98369b5d260-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.477370 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab604dfb-8fe4-4e98-8652-a98369b5d260","Type":"ContainerDied","Data":"242f9300dcca13e597db3d366127dfb67398d0ffaacbeb5fb4bf687800251033"} Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.477420 4941 scope.go:117] "RemoveContainer" containerID="c0e61a8d6e8681fca905ebcb523b95149808986e1480520d3172dd03109c9c5b" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.477447 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.500222 4941 scope.go:117] "RemoveContainer" containerID="d665c69b49cc8c4f3a5b3024b78164a30243d365fd7ef133b4450db4a1744d30" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.520236 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.532378 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.543519 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:28 crc kubenswrapper[4941]: E1130 07:06:28.544704 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dbaf24-8e9d-4981-a37c-561c8c7e98aa" containerName="mariadb-account-create-update" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.544723 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dbaf24-8e9d-4981-a37c-561c8c7e98aa" containerName="mariadb-account-create-update" Nov 30 07:06:28 crc kubenswrapper[4941]: E1130 07:06:28.544769 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5c83930-89f1-45d1-827f-1584ee8ce557" containerName="mariadb-database-create" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.544776 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5c83930-89f1-45d1-827f-1584ee8ce557" containerName="mariadb-database-create" Nov 30 07:06:28 crc kubenswrapper[4941]: E1130 07:06:28.544791 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="proxy-httpd" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.544798 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="proxy-httpd" Nov 30 07:06:28 crc kubenswrapper[4941]: E1130 07:06:28.544813 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3f11ab2-1455-484c-8d3d-a09bf34a6f72" containerName="mariadb-database-create" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.544838 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3f11ab2-1455-484c-8d3d-a09bf34a6f72" containerName="mariadb-database-create" Nov 30 07:06:28 crc kubenswrapper[4941]: E1130 07:06:28.544861 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="ceilometer-notification-agent" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.544867 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="ceilometer-notification-agent" Nov 30 07:06:28 crc kubenswrapper[4941]: E1130 07:06:28.544878 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="sg-core" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.544884 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="sg-core" Nov 30 07:06:28 crc kubenswrapper[4941]: E1130 07:06:28.544920 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="ceilometer-central-agent" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.544926 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="ceilometer-central-agent" Nov 30 07:06:28 crc 
kubenswrapper[4941]: E1130 07:06:28.544943 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fed42e7e-ed1d-4463-8088-3d60e06dd00e" containerName="mariadb-account-create-update" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.544949 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fed42e7e-ed1d-4463-8088-3d60e06dd00e" containerName="mariadb-account-create-update" Nov 30 07:06:28 crc kubenswrapper[4941]: E1130 07:06:28.544958 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddbc8aeb-8359-4427-843f-c6e2377e2857" containerName="mariadb-account-create-update" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.544964 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddbc8aeb-8359-4427-843f-c6e2377e2857" containerName="mariadb-account-create-update" Nov 30 07:06:28 crc kubenswrapper[4941]: E1130 07:06:28.545001 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e76ecf6f-9843-4077-b8ff-602840dac5af" containerName="mariadb-database-create" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.545008 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e76ecf6f-9843-4077-b8ff-602840dac5af" containerName="mariadb-database-create" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.545489 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="sg-core" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.545514 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5c83930-89f1-45d1-827f-1584ee8ce557" containerName="mariadb-database-create" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.545527 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="ceilometer-notification-agent" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.545540 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="ceilometer-central-agent" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.545574 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddbc8aeb-8359-4427-843f-c6e2377e2857" containerName="mariadb-account-create-update" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.545594 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3f11ab2-1455-484c-8d3d-a09bf34a6f72" containerName="mariadb-database-create" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.545611 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" containerName="proxy-httpd" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.545644 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="66dbaf24-8e9d-4981-a37c-561c8c7e98aa" containerName="mariadb-account-create-update" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.545659 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e76ecf6f-9843-4077-b8ff-602840dac5af" containerName="mariadb-database-create" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.545671 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="fed42e7e-ed1d-4463-8088-3d60e06dd00e" containerName="mariadb-account-create-update" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.548972 4941 scope.go:117] "RemoveContainer" containerID="2b35414a0896dbaa38ec03ad6db919f7a32c9b9d5f88db656e884592de8bd62d" Nov 30 07:06:28 crc 
kubenswrapper[4941]: I1130 07:06:28.550268 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.552590 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.553337 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.570773 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.583866 4941 scope.go:117] "RemoveContainer" containerID="d2081b880a27904af1223737519422538d6db133f4caa53fe25b036dc35f6cb4" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.635791 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86d951f2-9623-47e0-b878-d6fbcf14916c-log-httpd\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.635851 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-config-data\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.635890 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-scripts\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.635943 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86d951f2-9623-47e0-b878-d6fbcf14916c-run-httpd\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.635987 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.636025 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8xc2\" (UniqueName: \"kubernetes.io/projected/86d951f2-9623-47e0-b878-d6fbcf14916c-kube-api-access-v8xc2\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.637939 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.740020 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86d951f2-9623-47e0-b878-d6fbcf14916c-run-httpd\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.740088 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.740122 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8xc2\" (UniqueName: \"kubernetes.io/projected/86d951f2-9623-47e0-b878-d6fbcf14916c-kube-api-access-v8xc2\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.740143 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.740205 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86d951f2-9623-47e0-b878-d6fbcf14916c-log-httpd\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.740235 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-config-data\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.740269 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-scripts\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.740547 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86d951f2-9623-47e0-b878-d6fbcf14916c-run-httpd\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.740804 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86d951f2-9623-47e0-b878-d6fbcf14916c-log-httpd\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.745677 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-config-data\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.745781 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.751218 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-scripts\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.757447 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.757849 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8xc2\" (UniqueName: \"kubernetes.io/projected/86d951f2-9623-47e0-b878-d6fbcf14916c-kube-api-access-v8xc2\") pod \"ceilometer-0\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " pod="openstack/ceilometer-0" Nov 30 07:06:28 crc kubenswrapper[4941]: I1130 07:06:28.874704 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:06:29 crc kubenswrapper[4941]: I1130 07:06:29.365934 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:29 crc kubenswrapper[4941]: I1130 07:06:29.532677 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab604dfb-8fe4-4e98-8652-a98369b5d260" path="/var/lib/kubelet/pods/ab604dfb-8fe4-4e98-8652-a98369b5d260/volumes" Nov 30 07:06:31 crc kubenswrapper[4941]: I1130 07:06:31.812381 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:32 crc kubenswrapper[4941]: I1130 07:06:32.694705 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:32 crc kubenswrapper[4941]: I1130 07:06:32.695144 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:06:32 crc kubenswrapper[4941]: W1130 07:06:32.881235 4941 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5c83930_89f1_45d1_827f_1584ee8ce557.slice/crio-fc82cd08bae8260c63d177cb520a3ad485ad5481cded0679001673d3f6da3c35.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5c83930_89f1_45d1_827f_1584ee8ce557.slice/crio-fc82cd08bae8260c63d177cb520a3ad485ad5481cded0679001673d3f6da3c35.scope: no such file or directory Nov 30 07:06:32 crc kubenswrapper[4941]: W1130 07:06:32.888773 4941 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3f11ab2_1455_484c_8d3d_a09bf34a6f72.slice/crio-5feed6eb2cb532f2c54ced833758c2e5fa1a92cfe4c4490ed9f05975bb51578c": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3f11ab2_1455_484c_8d3d_a09bf34a6f72.slice/crio-5feed6eb2cb532f2c54ced833758c2e5fa1a92cfe4c4490ed9f05975bb51578c: no such file or directory Nov 30 07:06:32 crc kubenswrapper[4941]: W1130 07:06:32.889299 4941 
watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfed42e7e_ed1d_4463_8088_3d60e06dd00e.slice/crio-conmon-f07d4fed2519239f0e4fb63e2104fd272dece2ded2e1def806ae733a19ed8d77.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfed42e7e_ed1d_4463_8088_3d60e06dd00e.slice/crio-conmon-f07d4fed2519239f0e4fb63e2104fd272dece2ded2e1def806ae733a19ed8d77.scope: no such file or directory Nov 30 07:06:32 crc kubenswrapper[4941]: W1130 07:06:32.889333 4941 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfed42e7e_ed1d_4463_8088_3d60e06dd00e.slice/crio-f07d4fed2519239f0e4fb63e2104fd272dece2ded2e1def806ae733a19ed8d77.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfed42e7e_ed1d_4463_8088_3d60e06dd00e.slice/crio-f07d4fed2519239f0e4fb63e2104fd272dece2ded2e1def806ae733a19ed8d77.scope: no such file or directory Nov 30 07:06:32 crc kubenswrapper[4941]: W1130 07:06:32.889370 4941 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dbaf24_8e9d_4981_a37c_561c8c7e98aa.slice/crio-2b5e93b4ec2b43ad9b7911aef94749fc8f14d340068b4603dda766c05c5d2c42": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dbaf24_8e9d_4981_a37c_561c8c7e98aa.slice/crio-2b5e93b4ec2b43ad9b7911aef94749fc8f14d340068b4603dda766c05c5d2c42: no such file or directory Nov 30 07:06:32 crc kubenswrapper[4941]: W1130 07:06:32.889394 4941 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3f11ab2_1455_484c_8d3d_a09bf34a6f72.slice/crio-conmon-a89fcf084f19b692e7cb17f1e0ea67edc9149cb997bef9b438b46193fdbaca60.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3f11ab2_1455_484c_8d3d_a09bf34a6f72.slice/crio-conmon-a89fcf084f19b692e7cb17f1e0ea67edc9149cb997bef9b438b46193fdbaca60.scope: no such file or directory Nov 30 07:06:32 crc kubenswrapper[4941]: W1130 07:06:32.889583 4941 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3f11ab2_1455_484c_8d3d_a09bf34a6f72.slice/crio-a89fcf084f19b692e7cb17f1e0ea67edc9149cb997bef9b438b46193fdbaca60.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3f11ab2_1455_484c_8d3d_a09bf34a6f72.slice/crio-a89fcf084f19b692e7cb17f1e0ea67edc9149cb997bef9b438b46193fdbaca60.scope: no such file or directory Nov 30 07:06:32 crc kubenswrapper[4941]: W1130 07:06:32.892299 4941 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddbc8aeb_8359_4427_843f_c6e2377e2857.slice/crio-34665f41922f3fd9d687c9f8daa2df914313ffa9aceb7e3c322e7d30bfc86ba0": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddbc8aeb_8359_4427_843f_c6e2377e2857.slice/crio-34665f41922f3fd9d687c9f8daa2df914313ffa9aceb7e3c322e7d30bfc86ba0: no such file or directory Nov 30 07:06:32 crc kubenswrapper[4941]: W1130 07:06:32.897748 
4941 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddbc8aeb_8359_4427_843f_c6e2377e2857.slice/crio-conmon-a3e75aa9880eaa34a2042e2ff3704bcab240a15c359e560eb07f1dbc92fb900d.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddbc8aeb_8359_4427_843f_c6e2377e2857.slice/crio-conmon-a3e75aa9880eaa34a2042e2ff3704bcab240a15c359e560eb07f1dbc92fb900d.scope: no such file or directory Nov 30 07:06:32 crc kubenswrapper[4941]: W1130 07:06:32.897795 4941 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dbaf24_8e9d_4981_a37c_561c8c7e98aa.slice/crio-conmon-a450fc13d9cba17de5b9a5145a69dd69674600063f71ac28c96e01143111704a.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dbaf24_8e9d_4981_a37c_561c8c7e98aa.slice/crio-conmon-a450fc13d9cba17de5b9a5145a69dd69674600063f71ac28c96e01143111704a.scope: no such file or directory Nov 30 07:06:32 crc kubenswrapper[4941]: W1130 07:06:32.897828 4941 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddbc8aeb_8359_4427_843f_c6e2377e2857.slice/crio-a3e75aa9880eaa34a2042e2ff3704bcab240a15c359e560eb07f1dbc92fb900d.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddbc8aeb_8359_4427_843f_c6e2377e2857.slice/crio-a3e75aa9880eaa34a2042e2ff3704bcab240a15c359e560eb07f1dbc92fb900d.scope: no such file or directory Nov 30 07:06:32 crc kubenswrapper[4941]: W1130 07:06:32.897862 4941 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dbaf24_8e9d_4981_a37c_561c8c7e98aa.slice/crio-a450fc13d9cba17de5b9a5145a69dd69674600063f71ac28c96e01143111704a.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66dbaf24_8e9d_4981_a37c_561c8c7e98aa.slice/crio-a450fc13d9cba17de5b9a5145a69dd69674600063f71ac28c96e01143111704a.scope: no such file or directory Nov 30 07:06:32 crc kubenswrapper[4941]: I1130 07:06:32.978525 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:06:32 crc kubenswrapper[4941]: I1130 07:06:32.978624 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.252174 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-r9r94"] Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.253299 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.256940 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.257161 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.257695 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-cswn5" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.270561 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-r9r94"] Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.443504 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-scripts\") pod \"nova-cell0-conductor-db-sync-r9r94\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.443576 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-config-data\") pod \"nova-cell0-conductor-db-sync-r9r94\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.443625 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxcxh\" (UniqueName: \"kubernetes.io/projected/130403d6-7916-4482-8ced-0a334b1ca222-kube-api-access-pxcxh\") pod \"nova-cell0-conductor-db-sync-r9r94\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.443695 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-r9r94\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.537774 4941 generic.go:334] "Generic (PLEG): container finished" podID="1b690508-1059-4ce8-9034-23014064d01c" containerID="3e42afe81d1ede373645db3194928f044f069a410a6cdb30fa97f84d0647fdda" exitCode=137 Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.540422 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1b690508-1059-4ce8-9034-23014064d01c","Type":"ContainerDied","Data":"3e42afe81d1ede373645db3194928f044f069a410a6cdb30fa97f84d0647fdda"} Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.545783 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-r9r94\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.545925 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-scripts\") pod \"nova-cell0-conductor-db-sync-r9r94\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.545962 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-config-data\") pod \"nova-cell0-conductor-db-sync-r9r94\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.546000 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxcxh\" (UniqueName: \"kubernetes.io/projected/130403d6-7916-4482-8ced-0a334b1ca222-kube-api-access-pxcxh\") pod \"nova-cell0-conductor-db-sync-r9r94\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.556293 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-r9r94\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.558582 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-config-data\") pod \"nova-cell0-conductor-db-sync-r9r94\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.560652 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-scripts\") pod \"nova-cell0-conductor-db-sync-r9r94\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.563175 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxcxh\" (UniqueName: \"kubernetes.io/projected/130403d6-7916-4482-8ced-0a334b1ca222-kube-api-access-pxcxh\") pod \"nova-cell0-conductor-db-sync-r9r94\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:33 crc kubenswrapper[4941]: I1130 07:06:33.613315 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:34 crc kubenswrapper[4941]: W1130 07:06:34.590414 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod86d951f2_9623_47e0_b878_d6fbcf14916c.slice/crio-8beacd445567c8d919016af6de870ec4b4e229dda2fb15eceb1b5261768eca4d WatchSource:0}: Error finding container 8beacd445567c8d919016af6de870ec4b4e229dda2fb15eceb1b5261768eca4d: Status 404 returned error can't find the container with id 8beacd445567c8d919016af6de870ec4b4e229dda2fb15eceb1b5261768eca4d Nov 30 07:06:34 crc kubenswrapper[4941]: I1130 07:06:34.962413 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.081626 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-config-data\") pod \"1b690508-1059-4ce8-9034-23014064d01c\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.082822 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjlnk\" (UniqueName: \"kubernetes.io/projected/1b690508-1059-4ce8-9034-23014064d01c-kube-api-access-cjlnk\") pod \"1b690508-1059-4ce8-9034-23014064d01c\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.083431 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-combined-ca-bundle\") pod \"1b690508-1059-4ce8-9034-23014064d01c\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.083537 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b690508-1059-4ce8-9034-23014064d01c-logs\") pod \"1b690508-1059-4ce8-9034-23014064d01c\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.083656 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-config-data-custom\") pod \"1b690508-1059-4ce8-9034-23014064d01c\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.083813 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-scripts\") pod \"1b690508-1059-4ce8-9034-23014064d01c\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.083878 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b690508-1059-4ce8-9034-23014064d01c-logs" (OuterVolumeSpecName: "logs") pod "1b690508-1059-4ce8-9034-23014064d01c" (UID: "1b690508-1059-4ce8-9034-23014064d01c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.084008 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1b690508-1059-4ce8-9034-23014064d01c-etc-machine-id\") pod \"1b690508-1059-4ce8-9034-23014064d01c\" (UID: \"1b690508-1059-4ce8-9034-23014064d01c\") " Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.084964 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b690508-1059-4ce8-9034-23014064d01c-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.085061 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1b690508-1059-4ce8-9034-23014064d01c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1b690508-1059-4ce8-9034-23014064d01c" (UID: "1b690508-1059-4ce8-9034-23014064d01c"). 
InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.088343 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b690508-1059-4ce8-9034-23014064d01c-kube-api-access-cjlnk" (OuterVolumeSpecName: "kube-api-access-cjlnk") pod "1b690508-1059-4ce8-9034-23014064d01c" (UID: "1b690508-1059-4ce8-9034-23014064d01c"). InnerVolumeSpecName "kube-api-access-cjlnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.090032 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1b690508-1059-4ce8-9034-23014064d01c" (UID: "1b690508-1059-4ce8-9034-23014064d01c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.090120 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-scripts" (OuterVolumeSpecName: "scripts") pod "1b690508-1059-4ce8-9034-23014064d01c" (UID: "1b690508-1059-4ce8-9034-23014064d01c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.116467 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1b690508-1059-4ce8-9034-23014064d01c" (UID: "1b690508-1059-4ce8-9034-23014064d01c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.131505 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-config-data" (OuterVolumeSpecName: "config-data") pod "1b690508-1059-4ce8-9034-23014064d01c" (UID: "1b690508-1059-4ce8-9034-23014064d01c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.186295 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.186337 4941 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1b690508-1059-4ce8-9034-23014064d01c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.186348 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.186358 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjlnk\" (UniqueName: \"kubernetes.io/projected/1b690508-1059-4ce8-9034-23014064d01c-kube-api-access-cjlnk\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.186368 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.186377 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b690508-1059-4ce8-9034-23014064d01c-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.285509 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-r9r94"] Nov 30 07:06:35 crc kubenswrapper[4941]: W1130 07:06:35.287024 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod130403d6_7916_4482_8ced_0a334b1ca222.slice/crio-e89172d8f511caa0e2dd1009b3174ea4b0426b54a9b867b421a6b3e5ad0b962e WatchSource:0}: Error finding container e89172d8f511caa0e2dd1009b3174ea4b0426b54a9b867b421a6b3e5ad0b962e: Status 404 returned error can't find the container with id e89172d8f511caa0e2dd1009b3174ea4b0426b54a9b867b421a6b3e5ad0b962e Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.557119 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"86d951f2-9623-47e0-b878-d6fbcf14916c","Type":"ContainerStarted","Data":"7fd05f091462849219288c4aa2ec4db167dc228def8c742381a3eef62351522c"} Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.557637 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"86d951f2-9623-47e0-b878-d6fbcf14916c","Type":"ContainerStarted","Data":"8beacd445567c8d919016af6de870ec4b4e229dda2fb15eceb1b5261768eca4d"} Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.558947 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-r9r94" event={"ID":"130403d6-7916-4482-8ced-0a334b1ca222","Type":"ContainerStarted","Data":"e89172d8f511caa0e2dd1009b3174ea4b0426b54a9b867b421a6b3e5ad0b962e"} Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.561063 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"1b690508-1059-4ce8-9034-23014064d01c","Type":"ContainerDied","Data":"4a97fed393a777c3dfccd3e2a6c90ed3c0ed27658056ee4b8ede3b8328415be3"} Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.561117 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.561130 4941 scope.go:117] "RemoveContainer" containerID="3e42afe81d1ede373645db3194928f044f069a410a6cdb30fa97f84d0647fdda" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.562712 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"5679f4ed-6882-4f85-93b2-02ccff357b48","Type":"ContainerStarted","Data":"7f998986b8950e234b7bc2a24fd3e535f40df67a3670f9a085fdfa035b9598de"} Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.613170 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.625569 4941 scope.go:117] "RemoveContainer" containerID="8d2ba2c844810ea8a564d33984c34b62520beb9a2b84a184a69226b07035b4eb" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.645766 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.667085 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 30 07:06:35 crc kubenswrapper[4941]: E1130 07:06:35.668783 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b690508-1059-4ce8-9034-23014064d01c" containerName="cinder-api-log" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.668870 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b690508-1059-4ce8-9034-23014064d01c" containerName="cinder-api-log" Nov 30 07:06:35 crc kubenswrapper[4941]: E1130 07:06:35.668952 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b690508-1059-4ce8-9034-23014064d01c" containerName="cinder-api" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.669006 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b690508-1059-4ce8-9034-23014064d01c" containerName="cinder-api" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.670060 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b690508-1059-4ce8-9034-23014064d01c" containerName="cinder-api" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.670140 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b690508-1059-4ce8-9034-23014064d01c" containerName="cinder-api-log" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.670262 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.396783669 podStartE2EDuration="12.67023178s" podCreationTimestamp="2025-11-30 07:06:23 +0000 UTC" firstStartedPulling="2025-11-30 07:06:24.424188337 +0000 UTC m=+1205.192359936" lastFinishedPulling="2025-11-30 07:06:34.697636438 +0000 UTC m=+1215.465808047" observedRunningTime="2025-11-30 07:06:35.62530333 +0000 UTC m=+1216.393474959" watchObservedRunningTime="2025-11-30 07:06:35.67023178 +0000 UTC m=+1216.438403439" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.671171 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.671696 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.676375 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.677244 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.677303 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.700588 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzrjx\" (UniqueName: \"kubernetes.io/projected/1822cd0b-b52d-49d7-b787-a1091edfc585-kube-api-access-wzrjx\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.700812 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-scripts\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.700996 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1822cd0b-b52d-49d7-b787-a1091edfc585-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.701084 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-config-data-custom\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.701150 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-config-data\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.701272 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-public-tls-certs\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.701424 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.701515 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " 
pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.701626 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1822cd0b-b52d-49d7-b787-a1091edfc585-logs\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.803675 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1822cd0b-b52d-49d7-b787-a1091edfc585-logs\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.803760 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzrjx\" (UniqueName: \"kubernetes.io/projected/1822cd0b-b52d-49d7-b787-a1091edfc585-kube-api-access-wzrjx\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.803788 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-scripts\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.803834 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1822cd0b-b52d-49d7-b787-a1091edfc585-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.803855 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-config-data-custom\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.803872 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-config-data\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.803928 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-public-tls-certs\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.803948 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.803981 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " 
pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.804448 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1822cd0b-b52d-49d7-b787-a1091edfc585-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.804791 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1822cd0b-b52d-49d7-b787-a1091edfc585-logs\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.809063 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-scripts\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.810041 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-config-data\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.810219 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-public-tls-certs\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.812435 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.812954 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-config-data-custom\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.813483 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:35 crc kubenswrapper[4941]: I1130 07:06:35.826609 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzrjx\" (UniqueName: \"kubernetes.io/projected/1822cd0b-b52d-49d7-b787-a1091edfc585-kube-api-access-wzrjx\") pod \"cinder-api-0\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " pod="openstack/cinder-api-0" Nov 30 07:06:36 crc kubenswrapper[4941]: I1130 07:06:36.027949 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 30 07:06:36 crc kubenswrapper[4941]: I1130 07:06:36.414382 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 30 07:06:36 crc kubenswrapper[4941]: I1130 07:06:36.575811 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1822cd0b-b52d-49d7-b787-a1091edfc585","Type":"ContainerStarted","Data":"ebc4760d02ea26948818c3505bbf1ced4d372e9a77ff8f1caf03fce3b5bca38a"} Nov 30 07:06:36 crc kubenswrapper[4941]: I1130 07:06:36.578157 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"86d951f2-9623-47e0-b878-d6fbcf14916c","Type":"ContainerStarted","Data":"774b8c45355ca687279d2ce0e9306bf17714772ddef7d4bc3802a1f678f3e508"} Nov 30 07:06:37 crc kubenswrapper[4941]: I1130 07:06:37.536493 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b690508-1059-4ce8-9034-23014064d01c" path="/var/lib/kubelet/pods/1b690508-1059-4ce8-9034-23014064d01c/volumes" Nov 30 07:06:37 crc kubenswrapper[4941]: I1130 07:06:37.606499 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1822cd0b-b52d-49d7-b787-a1091edfc585","Type":"ContainerStarted","Data":"305c3aba8af09bc4d0a3d63208f7e2949135c2ee18761aac46287cf1068feab1"} Nov 30 07:06:37 crc kubenswrapper[4941]: I1130 07:06:37.614210 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"86d951f2-9623-47e0-b878-d6fbcf14916c","Type":"ContainerStarted","Data":"3738e251c0006949204ef6cf1a2bcc5789df1fa6005c4cdba21d146286416cce"} Nov 30 07:06:38 crc kubenswrapper[4941]: I1130 07:06:38.627242 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"86d951f2-9623-47e0-b878-d6fbcf14916c","Type":"ContainerStarted","Data":"cbd222ea0bd8f46128de0a0c130093495259e65b0ed936f42e5f523b42aed307"} Nov 30 07:06:38 crc kubenswrapper[4941]: I1130 07:06:38.627924 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 30 07:06:38 crc kubenswrapper[4941]: I1130 07:06:38.627768 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="ceilometer-central-agent" containerID="cri-o://7fd05f091462849219288c4aa2ec4db167dc228def8c742381a3eef62351522c" gracePeriod=30 Nov 30 07:06:38 crc kubenswrapper[4941]: I1130 07:06:38.628036 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="proxy-httpd" containerID="cri-o://cbd222ea0bd8f46128de0a0c130093495259e65b0ed936f42e5f523b42aed307" gracePeriod=30 Nov 30 07:06:38 crc kubenswrapper[4941]: I1130 07:06:38.628130 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="ceilometer-notification-agent" containerID="cri-o://774b8c45355ca687279d2ce0e9306bf17714772ddef7d4bc3802a1f678f3e508" gracePeriod=30 Nov 30 07:06:38 crc kubenswrapper[4941]: I1130 07:06:38.628172 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="sg-core" containerID="cri-o://3738e251c0006949204ef6cf1a2bcc5789df1fa6005c4cdba21d146286416cce" gracePeriod=30 Nov 30 07:06:38 crc kubenswrapper[4941]: I1130 
07:06:38.633442 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1822cd0b-b52d-49d7-b787-a1091edfc585","Type":"ContainerStarted","Data":"96e02e36152e3e77bc3bcb37965f0c42f78aaeec0dbbaa72be4ad8497a927704"} Nov 30 07:06:38 crc kubenswrapper[4941]: I1130 07:06:38.633739 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 30 07:06:38 crc kubenswrapper[4941]: I1130 07:06:38.676313 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=7.143052061 podStartE2EDuration="10.676295147s" podCreationTimestamp="2025-11-30 07:06:28 +0000 UTC" firstStartedPulling="2025-11-30 07:06:34.614808387 +0000 UTC m=+1215.382979996" lastFinishedPulling="2025-11-30 07:06:38.148051473 +0000 UTC m=+1218.916223082" observedRunningTime="2025-11-30 07:06:38.655656203 +0000 UTC m=+1219.423827812" watchObservedRunningTime="2025-11-30 07:06:38.676295147 +0000 UTC m=+1219.444466756" Nov 30 07:06:38 crc kubenswrapper[4941]: I1130 07:06:38.676901 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.6768941760000002 podStartE2EDuration="3.676894176s" podCreationTimestamp="2025-11-30 07:06:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:06:38.676158382 +0000 UTC m=+1219.444329991" watchObservedRunningTime="2025-11-30 07:06:38.676894176 +0000 UTC m=+1219.445065785" Nov 30 07:06:39 crc kubenswrapper[4941]: I1130 07:06:39.649593 4941 generic.go:334] "Generic (PLEG): container finished" podID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerID="cbd222ea0bd8f46128de0a0c130093495259e65b0ed936f42e5f523b42aed307" exitCode=0 Nov 30 07:06:39 crc kubenswrapper[4941]: I1130 07:06:39.649624 4941 generic.go:334] "Generic (PLEG): container finished" podID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerID="3738e251c0006949204ef6cf1a2bcc5789df1fa6005c4cdba21d146286416cce" exitCode=2 Nov 30 07:06:39 crc kubenswrapper[4941]: I1130 07:06:39.649632 4941 generic.go:334] "Generic (PLEG): container finished" podID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerID="774b8c45355ca687279d2ce0e9306bf17714772ddef7d4bc3802a1f678f3e508" exitCode=0 Nov 30 07:06:39 crc kubenswrapper[4941]: I1130 07:06:39.649676 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"86d951f2-9623-47e0-b878-d6fbcf14916c","Type":"ContainerDied","Data":"cbd222ea0bd8f46128de0a0c130093495259e65b0ed936f42e5f523b42aed307"} Nov 30 07:06:39 crc kubenswrapper[4941]: I1130 07:06:39.649718 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"86d951f2-9623-47e0-b878-d6fbcf14916c","Type":"ContainerDied","Data":"3738e251c0006949204ef6cf1a2bcc5789df1fa6005c4cdba21d146286416cce"} Nov 30 07:06:39 crc kubenswrapper[4941]: I1130 07:06:39.649729 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"86d951f2-9623-47e0-b878-d6fbcf14916c","Type":"ContainerDied","Data":"774b8c45355ca687279d2ce0e9306bf17714772ddef7d4bc3802a1f678f3e508"} Nov 30 07:06:41 crc kubenswrapper[4941]: I1130 07:06:41.675064 4941 generic.go:334] "Generic (PLEG): container finished" podID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerID="7fd05f091462849219288c4aa2ec4db167dc228def8c742381a3eef62351522c" exitCode=0 Nov 30 07:06:41 crc kubenswrapper[4941]: I1130 
07:06:41.675127 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"86d951f2-9623-47e0-b878-d6fbcf14916c","Type":"ContainerDied","Data":"7fd05f091462849219288c4aa2ec4db167dc228def8c742381a3eef62351522c"} Nov 30 07:06:42 crc kubenswrapper[4941]: I1130 07:06:42.181735 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:06:42 crc kubenswrapper[4941]: I1130 07:06:42.185078 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="eacf46e9-d6f3-4b22-9999-90bdb57164f9" containerName="glance-log" containerID="cri-o://52864eecab672111de43913d9eb689e210590535ffe9e8b70c63d3d9ca0ae2b3" gracePeriod=30 Nov 30 07:06:42 crc kubenswrapper[4941]: I1130 07:06:42.185211 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="eacf46e9-d6f3-4b22-9999-90bdb57164f9" containerName="glance-httpd" containerID="cri-o://ebbf634be703d8b94988f93bd497dfce040493da4962912a2d0670776bf39096" gracePeriod=30 Nov 30 07:06:42 crc kubenswrapper[4941]: I1130 07:06:42.686448 4941 generic.go:334] "Generic (PLEG): container finished" podID="eacf46e9-d6f3-4b22-9999-90bdb57164f9" containerID="52864eecab672111de43913d9eb689e210590535ffe9e8b70c63d3d9ca0ae2b3" exitCode=143 Nov 30 07:06:42 crc kubenswrapper[4941]: I1130 07:06:42.686623 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eacf46e9-d6f3-4b22-9999-90bdb57164f9","Type":"ContainerDied","Data":"52864eecab672111de43913d9eb689e210590535ffe9e8b70c63d3d9ca0ae2b3"} Nov 30 07:06:42 crc kubenswrapper[4941]: I1130 07:06:42.916746 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:06:42 crc kubenswrapper[4941]: I1130 07:06:42.916968 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" containerName="glance-log" containerID="cri-o://54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f" gracePeriod=30 Nov 30 07:06:42 crc kubenswrapper[4941]: I1130 07:06:42.917404 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" containerName="glance-httpd" containerID="cri-o://7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f" gracePeriod=30 Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.637440 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.695668 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-combined-ca-bundle\") pod \"86d951f2-9623-47e0-b878-d6fbcf14916c\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.695779 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-sg-core-conf-yaml\") pod \"86d951f2-9623-47e0-b878-d6fbcf14916c\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.695849 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-config-data\") pod \"86d951f2-9623-47e0-b878-d6fbcf14916c\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.695887 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86d951f2-9623-47e0-b878-d6fbcf14916c-log-httpd\") pod \"86d951f2-9623-47e0-b878-d6fbcf14916c\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.695905 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86d951f2-9623-47e0-b878-d6fbcf14916c-run-httpd\") pod \"86d951f2-9623-47e0-b878-d6fbcf14916c\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.695958 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-scripts\") pod \"86d951f2-9623-47e0-b878-d6fbcf14916c\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.696005 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8xc2\" (UniqueName: \"kubernetes.io/projected/86d951f2-9623-47e0-b878-d6fbcf14916c-kube-api-access-v8xc2\") pod \"86d951f2-9623-47e0-b878-d6fbcf14916c\" (UID: \"86d951f2-9623-47e0-b878-d6fbcf14916c\") " Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.696525 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86d951f2-9623-47e0-b878-d6fbcf14916c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "86d951f2-9623-47e0-b878-d6fbcf14916c" (UID: "86d951f2-9623-47e0-b878-d6fbcf14916c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.696797 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86d951f2-9623-47e0-b878-d6fbcf14916c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "86d951f2-9623-47e0-b878-d6fbcf14916c" (UID: "86d951f2-9623-47e0-b878-d6fbcf14916c"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.708624 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86d951f2-9623-47e0-b878-d6fbcf14916c-kube-api-access-v8xc2" (OuterVolumeSpecName: "kube-api-access-v8xc2") pod "86d951f2-9623-47e0-b878-d6fbcf14916c" (UID: "86d951f2-9623-47e0-b878-d6fbcf14916c"). InnerVolumeSpecName "kube-api-access-v8xc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.710964 4941 generic.go:334] "Generic (PLEG): container finished" podID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" containerID="54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f" exitCode=143 Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.711042 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ea693ac4-4f2c-42fa-b59e-080e1f72ff77","Type":"ContainerDied","Data":"54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f"} Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.715466 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-scripts" (OuterVolumeSpecName: "scripts") pod "86d951f2-9623-47e0-b878-d6fbcf14916c" (UID: "86d951f2-9623-47e0-b878-d6fbcf14916c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.718478 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"86d951f2-9623-47e0-b878-d6fbcf14916c","Type":"ContainerDied","Data":"8beacd445567c8d919016af6de870ec4b4e229dda2fb15eceb1b5261768eca4d"} Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.718517 4941 scope.go:117] "RemoveContainer" containerID="cbd222ea0bd8f46128de0a0c130093495259e65b0ed936f42e5f523b42aed307" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.718518 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.724398 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-r9r94" event={"ID":"130403d6-7916-4482-8ced-0a334b1ca222","Type":"ContainerStarted","Data":"d60b2e93cd8fe47985ea3c022d6fe900447816293ccb970fad6d8c2750bd7db3"} Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.728156 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "86d951f2-9623-47e0-b878-d6fbcf14916c" (UID: "86d951f2-9623-47e0-b878-d6fbcf14916c"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.741615 4941 scope.go:117] "RemoveContainer" containerID="3738e251c0006949204ef6cf1a2bcc5789df1fa6005c4cdba21d146286416cce" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.745362 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-r9r94" podStartSLOduration=2.670016614 podStartE2EDuration="10.745346368s" podCreationTimestamp="2025-11-30 07:06:33 +0000 UTC" firstStartedPulling="2025-11-30 07:06:35.289257807 +0000 UTC m=+1216.057429416" lastFinishedPulling="2025-11-30 07:06:43.364587561 +0000 UTC m=+1224.132759170" observedRunningTime="2025-11-30 07:06:43.739833466 +0000 UTC m=+1224.508005075" watchObservedRunningTime="2025-11-30 07:06:43.745346368 +0000 UTC m=+1224.513517977" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.760296 4941 scope.go:117] "RemoveContainer" containerID="774b8c45355ca687279d2ce0e9306bf17714772ddef7d4bc3802a1f678f3e508" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.778407 4941 scope.go:117] "RemoveContainer" containerID="7fd05f091462849219288c4aa2ec4db167dc228def8c742381a3eef62351522c" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.791200 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "86d951f2-9623-47e0-b878-d6fbcf14916c" (UID: "86d951f2-9623-47e0-b878-d6fbcf14916c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.799557 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8xc2\" (UniqueName: \"kubernetes.io/projected/86d951f2-9623-47e0-b878-d6fbcf14916c-kube-api-access-v8xc2\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.799586 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.799595 4941 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.799603 4941 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86d951f2-9623-47e0-b878-d6fbcf14916c-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.799612 4941 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86d951f2-9623-47e0-b878-d6fbcf14916c-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.799619 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.814191 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-config-data" (OuterVolumeSpecName: "config-data") pod 
"86d951f2-9623-47e0-b878-d6fbcf14916c" (UID: "86d951f2-9623-47e0-b878-d6fbcf14916c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:43 crc kubenswrapper[4941]: I1130 07:06:43.900958 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d951f2-9623-47e0-b878-d6fbcf14916c-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.053166 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.061070 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.079022 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:44 crc kubenswrapper[4941]: E1130 07:06:44.079453 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="proxy-httpd" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.079472 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="proxy-httpd" Nov 30 07:06:44 crc kubenswrapper[4941]: E1130 07:06:44.079492 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="sg-core" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.079501 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="sg-core" Nov 30 07:06:44 crc kubenswrapper[4941]: E1130 07:06:44.079528 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="ceilometer-notification-agent" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.079535 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="ceilometer-notification-agent" Nov 30 07:06:44 crc kubenswrapper[4941]: E1130 07:06:44.079549 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="ceilometer-central-agent" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.079555 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="ceilometer-central-agent" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.079719 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="sg-core" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.079733 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="ceilometer-central-agent" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.079743 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="ceilometer-notification-agent" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.079756 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" containerName="proxy-httpd" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.086154 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.090286 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.090534 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.110092 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e6a1813-c919-48c6-939b-3e9c74272123-log-httpd\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.110166 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e6a1813-c919-48c6-939b-3e9c74272123-run-httpd\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.110209 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.110633 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.110885 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-scripts\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.111031 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-config-data\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.111085 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bd7gn\" (UniqueName: \"kubernetes.io/projected/4e6a1813-c919-48c6-939b-3e9c74272123-kube-api-access-bd7gn\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.118574 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.212872 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 
07:06:44.212922 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.212977 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-scripts\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.213021 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-config-data\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.213040 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bd7gn\" (UniqueName: \"kubernetes.io/projected/4e6a1813-c919-48c6-939b-3e9c74272123-kube-api-access-bd7gn\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.213089 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e6a1813-c919-48c6-939b-3e9c74272123-log-httpd\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.213117 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e6a1813-c919-48c6-939b-3e9c74272123-run-httpd\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.214263 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e6a1813-c919-48c6-939b-3e9c74272123-run-httpd\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.215408 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e6a1813-c919-48c6-939b-3e9c74272123-log-httpd\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.217393 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.218159 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-scripts\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.219108 4941 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.236961 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-config-data\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.241776 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bd7gn\" (UniqueName: \"kubernetes.io/projected/4e6a1813-c919-48c6-939b-3e9c74272123-kube-api-access-bd7gn\") pod \"ceilometer-0\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.428506 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:06:44 crc kubenswrapper[4941]: I1130 07:06:44.997335 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:44 crc kubenswrapper[4941]: W1130 07:06:44.997360 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e6a1813_c919_48c6_939b_3e9c74272123.slice/crio-6832a902a128e4428d353f08cb6478b4069495582ca9727e1e7400cfbb67e0f3 WatchSource:0}: Error finding container 6832a902a128e4428d353f08cb6478b4069495582ca9727e1e7400cfbb67e0f3: Status 404 returned error can't find the container with id 6832a902a128e4428d353f08cb6478b4069495582ca9727e1e7400cfbb67e0f3 Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.534301 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86d951f2-9623-47e0-b878-d6fbcf14916c" path="/var/lib/kubelet/pods/86d951f2-9623-47e0-b878-d6fbcf14916c/volumes" Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.685757 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.756750 4941 generic.go:334] "Generic (PLEG): container finished" podID="eacf46e9-d6f3-4b22-9999-90bdb57164f9" containerID="ebbf634be703d8b94988f93bd497dfce040493da4962912a2d0670776bf39096" exitCode=0 Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.756831 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eacf46e9-d6f3-4b22-9999-90bdb57164f9","Type":"ContainerDied","Data":"ebbf634be703d8b94988f93bd497dfce040493da4962912a2d0670776bf39096"} Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.758899 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e6a1813-c919-48c6-939b-3e9c74272123","Type":"ContainerStarted","Data":"86b36037a0c21984f09821cee0b08b8da2dba44f9bfc363a6be89cfbb3d2d4b9"} Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.758934 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e6a1813-c919-48c6-939b-3e9c74272123","Type":"ContainerStarted","Data":"6832a902a128e4428d353f08cb6478b4069495582ca9727e1e7400cfbb67e0f3"} Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.835631 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.950095 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eacf46e9-d6f3-4b22-9999-90bdb57164f9-httpd-run\") pod \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.950261 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk4dt\" (UniqueName: \"kubernetes.io/projected/eacf46e9-d6f3-4b22-9999-90bdb57164f9-kube-api-access-gk4dt\") pod \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.950291 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.950337 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-config-data\") pod \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.950353 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-scripts\") pod \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.950412 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-public-tls-certs\") pod \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.950442 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eacf46e9-d6f3-4b22-9999-90bdb57164f9-logs\") pod \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.950524 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-combined-ca-bundle\") pod \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\" (UID: \"eacf46e9-d6f3-4b22-9999-90bdb57164f9\") " Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.951106 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eacf46e9-d6f3-4b22-9999-90bdb57164f9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "eacf46e9-d6f3-4b22-9999-90bdb57164f9" (UID: "eacf46e9-d6f3-4b22-9999-90bdb57164f9"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.951136 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eacf46e9-d6f3-4b22-9999-90bdb57164f9-logs" (OuterVolumeSpecName: "logs") pod "eacf46e9-d6f3-4b22-9999-90bdb57164f9" (UID: "eacf46e9-d6f3-4b22-9999-90bdb57164f9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.956717 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eacf46e9-d6f3-4b22-9999-90bdb57164f9-kube-api-access-gk4dt" (OuterVolumeSpecName: "kube-api-access-gk4dt") pod "eacf46e9-d6f3-4b22-9999-90bdb57164f9" (UID: "eacf46e9-d6f3-4b22-9999-90bdb57164f9"). InnerVolumeSpecName "kube-api-access-gk4dt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.959439 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "eacf46e9-d6f3-4b22-9999-90bdb57164f9" (UID: "eacf46e9-d6f3-4b22-9999-90bdb57164f9"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:06:45 crc kubenswrapper[4941]: I1130 07:06:45.960436 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-scripts" (OuterVolumeSpecName: "scripts") pod "eacf46e9-d6f3-4b22-9999-90bdb57164f9" (UID: "eacf46e9-d6f3-4b22-9999-90bdb57164f9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.005069 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eacf46e9-d6f3-4b22-9999-90bdb57164f9" (UID: "eacf46e9-d6f3-4b22-9999-90bdb57164f9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.008608 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-config-data" (OuterVolumeSpecName: "config-data") pod "eacf46e9-d6f3-4b22-9999-90bdb57164f9" (UID: "eacf46e9-d6f3-4b22-9999-90bdb57164f9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.020559 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "eacf46e9-d6f3-4b22-9999-90bdb57164f9" (UID: "eacf46e9-d6f3-4b22-9999-90bdb57164f9"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.052693 4941 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eacf46e9-d6f3-4b22-9999-90bdb57164f9-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.052729 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk4dt\" (UniqueName: \"kubernetes.io/projected/eacf46e9-d6f3-4b22-9999-90bdb57164f9-kube-api-access-gk4dt\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.052762 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.052772 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.052780 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.052789 4941 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.052799 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eacf46e9-d6f3-4b22-9999-90bdb57164f9-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.052808 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eacf46e9-d6f3-4b22-9999-90bdb57164f9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.076975 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.081513 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.143:9292/healthcheck\": read tcp 10.217.0.2:51738->10.217.0.143:9292: read: connection reset by peer" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.081553 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.143:9292/healthcheck\": read tcp 10.217.0.2:51742->10.217.0.143:9292: read: connection reset by peer" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.162208 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.544160 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.591199 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54bxv\" (UniqueName: \"kubernetes.io/projected/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-kube-api-access-54bxv\") pod \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.591336 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.591549 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-config-data\") pod \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.591602 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-internal-tls-certs\") pod \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.591692 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-combined-ca-bundle\") pod \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.591721 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-scripts\") pod \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.591769 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-logs\") pod \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.591822 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-httpd-run\") pod \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\" (UID: \"ea693ac4-4f2c-42fa-b59e-080e1f72ff77\") " Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.594526 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-logs" (OuterVolumeSpecName: "logs") pod "ea693ac4-4f2c-42fa-b59e-080e1f72ff77" (UID: "ea693ac4-4f2c-42fa-b59e-080e1f72ff77"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.594385 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ea693ac4-4f2c-42fa-b59e-080e1f72ff77" (UID: "ea693ac4-4f2c-42fa-b59e-080e1f72ff77"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.607360 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-kube-api-access-54bxv" (OuterVolumeSpecName: "kube-api-access-54bxv") pod "ea693ac4-4f2c-42fa-b59e-080e1f72ff77" (UID: "ea693ac4-4f2c-42fa-b59e-080e1f72ff77"). InnerVolumeSpecName "kube-api-access-54bxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.607672 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-scripts" (OuterVolumeSpecName: "scripts") pod "ea693ac4-4f2c-42fa-b59e-080e1f72ff77" (UID: "ea693ac4-4f2c-42fa-b59e-080e1f72ff77"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.612705 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "ea693ac4-4f2c-42fa-b59e-080e1f72ff77" (UID: "ea693ac4-4f2c-42fa-b59e-080e1f72ff77"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.639616 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea693ac4-4f2c-42fa-b59e-080e1f72ff77" (UID: "ea693ac4-4f2c-42fa-b59e-080e1f72ff77"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.666614 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ea693ac4-4f2c-42fa-b59e-080e1f72ff77" (UID: "ea693ac4-4f2c-42fa-b59e-080e1f72ff77"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.669562 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-config-data" (OuterVolumeSpecName: "config-data") pod "ea693ac4-4f2c-42fa-b59e-080e1f72ff77" (UID: "ea693ac4-4f2c-42fa-b59e-080e1f72ff77"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.695159 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.695406 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.695469 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.695524 4941 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.695606 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54bxv\" (UniqueName: \"kubernetes.io/projected/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-kube-api-access-54bxv\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.695703 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.695768 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.695830 4941 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea693ac4-4f2c-42fa-b59e-080e1f72ff77-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.730542 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.770431 4941 generic.go:334] "Generic (PLEG): container finished" podID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" containerID="7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f" exitCode=0 Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.770519 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ea693ac4-4f2c-42fa-b59e-080e1f72ff77","Type":"ContainerDied","Data":"7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f"} Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.770594 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ea693ac4-4f2c-42fa-b59e-080e1f72ff77","Type":"ContainerDied","Data":"024a93520c60e9fc7c564ed20dfccb1736f905931407c4ca073bd583b862c3b0"} Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.770615 4941 scope.go:117] "RemoveContainer" containerID="7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.771688 4941 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.773696 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"eacf46e9-d6f3-4b22-9999-90bdb57164f9","Type":"ContainerDied","Data":"73630b9a9dd461bad2294aa74e41302faf73b4ef1223963181fe1a87afd212d7"} Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.773808 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.780525 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e6a1813-c919-48c6-939b-3e9c74272123","Type":"ContainerStarted","Data":"87933eeb2641aafacad34979626582fb8f19596280eabdd0756a64eec188eac3"} Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.797806 4941 scope.go:117] "RemoveContainer" containerID="54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.799979 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.828393 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.831726 4941 scope.go:117] "RemoveContainer" containerID="7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f" Nov 30 07:06:46 crc kubenswrapper[4941]: E1130 07:06:46.832260 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f\": container with ID starting with 7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f not found: ID does not exist" containerID="7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.832300 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f"} err="failed to get container status \"7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f\": rpc error: code = NotFound desc = could not find container \"7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f\": container with ID starting with 7e461206c7fd08ebc924d6ccefe7455ab900b4dc7d74123cc6a3ac82f4f2638f not found: ID does not exist" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.832342 4941 scope.go:117] "RemoveContainer" containerID="54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f" Nov 30 07:06:46 crc kubenswrapper[4941]: E1130 07:06:46.832843 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f\": container with ID starting with 54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f not found: ID does not exist" containerID="54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.832882 4941 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f"} err="failed to get container status \"54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f\": rpc error: code = NotFound desc = could not find container \"54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f\": container with ID starting with 54afcc80148ce65dcf8979185a5f911729f6d217f65aae0cc4126bf27ab13c3f not found: ID does not exist" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.832905 4941 scope.go:117] "RemoveContainer" containerID="ebbf634be703d8b94988f93bd497dfce040493da4962912a2d0670776bf39096" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.844080 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.865775 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.888239 4941 scope.go:117] "RemoveContainer" containerID="52864eecab672111de43913d9eb689e210590535ffe9e8b70c63d3d9ca0ae2b3" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.901584 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:06:46 crc kubenswrapper[4941]: E1130 07:06:46.903912 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" containerName="glance-log" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.903944 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" containerName="glance-log" Nov 30 07:06:46 crc kubenswrapper[4941]: E1130 07:06:46.903967 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" containerName="glance-httpd" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.903977 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" containerName="glance-httpd" Nov 30 07:06:46 crc kubenswrapper[4941]: E1130 07:06:46.903993 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eacf46e9-d6f3-4b22-9999-90bdb57164f9" containerName="glance-log" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.904003 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="eacf46e9-d6f3-4b22-9999-90bdb57164f9" containerName="glance-log" Nov 30 07:06:46 crc kubenswrapper[4941]: E1130 07:06:46.904541 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eacf46e9-d6f3-4b22-9999-90bdb57164f9" containerName="glance-httpd" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.904606 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="eacf46e9-d6f3-4b22-9999-90bdb57164f9" containerName="glance-httpd" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.905304 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" containerName="glance-log" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.905350 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="eacf46e9-d6f3-4b22-9999-90bdb57164f9" containerName="glance-httpd" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.905368 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="eacf46e9-d6f3-4b22-9999-90bdb57164f9" containerName="glance-log" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.905379 4941 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" containerName="glance-httpd" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.906811 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.940476 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-vv8hz" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.940744 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.940906 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.941668 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.965471 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:06:46 crc kubenswrapper[4941]: I1130 07:06:46.988504 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.014440 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.018382 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.032525 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.032603 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a376fae9-3d2f-4247-b917-0d63e6f4a9da-logs\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.032671 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.032822 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a376fae9-3d2f-4247-b917-0d63e6f4a9da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.032905 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmxtj\" (UniqueName: 
\"kubernetes.io/projected/a376fae9-3d2f-4247-b917-0d63e6f4a9da-kube-api-access-lmxtj\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.032942 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-config-data\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.032976 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-scripts\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.033020 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.034383 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.038838 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.062387 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.134391 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-config-data\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.134455 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-scripts\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.134485 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.134536 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 
07:06:47.134563 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a376fae9-3d2f-4247-b917-0d63e6f4a9da-logs\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.134595 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.134651 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a376fae9-3d2f-4247-b917-0d63e6f4a9da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.134684 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmxtj\" (UniqueName: \"kubernetes.io/projected/a376fae9-3d2f-4247-b917-0d63e6f4a9da-kube-api-access-lmxtj\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.135527 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a376fae9-3d2f-4247-b917-0d63e6f4a9da-logs\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.135949 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.136545 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a376fae9-3d2f-4247-b917-0d63e6f4a9da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.146209 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.146962 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-scripts\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.147462 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.148051 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-config-data\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.158491 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmxtj\" (UniqueName: \"kubernetes.io/projected/a376fae9-3d2f-4247-b917-0d63e6f4a9da-kube-api-access-lmxtj\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.183969 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.235960 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.236024 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.236062 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.236084 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmkzj\" (UniqueName: \"kubernetes.io/projected/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-kube-api-access-vmkzj\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.236105 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.236131 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-logs\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.236172 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.236191 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.278192 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.337360 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-logs\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.337733 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.337754 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.337819 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.337859 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.337889 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.337910 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmkzj\" (UniqueName: \"kubernetes.io/projected/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-kube-api-access-vmkzj\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.337930 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.338770 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.338974 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-logs\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.339267 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.350135 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.350658 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.350681 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.351017 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.374885 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmkzj\" 
(UniqueName: \"kubernetes.io/projected/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-kube-api-access-vmkzj\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.387117 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.407837 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.540002 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea693ac4-4f2c-42fa-b59e-080e1f72ff77" path="/var/lib/kubelet/pods/ea693ac4-4f2c-42fa-b59e-080e1f72ff77/volumes" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.541260 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eacf46e9-d6f3-4b22-9999-90bdb57164f9" path="/var/lib/kubelet/pods/eacf46e9-d6f3-4b22-9999-90bdb57164f9/volumes" Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.790246 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e6a1813-c919-48c6-939b-3e9c74272123","Type":"ContainerStarted","Data":"4c266c605530c1a658283a22bf5f7bf90bdb75fea88b84b558373796df8d6e99"} Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.868832 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:06:47 crc kubenswrapper[4941]: I1130 07:06:47.991792 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:06:48 crc kubenswrapper[4941]: I1130 07:06:48.824481 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e6a1813-c919-48c6-939b-3e9c74272123","Type":"ContainerStarted","Data":"fff7bdc943505c156c378414fdd2945d48a51f808fe0d3f663c154c310b94566"} Nov 30 07:06:48 crc kubenswrapper[4941]: I1130 07:06:48.824894 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="proxy-httpd" containerID="cri-o://fff7bdc943505c156c378414fdd2945d48a51f808fe0d3f663c154c310b94566" gracePeriod=30 Nov 30 07:06:48 crc kubenswrapper[4941]: I1130 07:06:48.824942 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="sg-core" containerID="cri-o://4c266c605530c1a658283a22bf5f7bf90bdb75fea88b84b558373796df8d6e99" gracePeriod=30 Nov 30 07:06:48 crc kubenswrapper[4941]: I1130 07:06:48.825056 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="ceilometer-notification-agent" containerID="cri-o://87933eeb2641aafacad34979626582fb8f19596280eabdd0756a64eec188eac3" gracePeriod=30 Nov 30 07:06:48 crc kubenswrapper[4941]: I1130 07:06:48.824628 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="ceilometer-central-agent" 
containerID="cri-o://86b36037a0c21984f09821cee0b08b8da2dba44f9bfc363a6be89cfbb3d2d4b9" gracePeriod=30 Nov 30 07:06:48 crc kubenswrapper[4941]: I1130 07:06:48.826772 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 30 07:06:48 crc kubenswrapper[4941]: I1130 07:06:48.826806 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 30 07:06:48 crc kubenswrapper[4941]: I1130 07:06:48.841524 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a573f7e0-ee6d-4847-a778-5f6ef41fd17f","Type":"ContainerStarted","Data":"9d4cad614888876b728ec4d308e74ba608a61af7f849e758a89e8b1fc6414299"} Nov 30 07:06:48 crc kubenswrapper[4941]: I1130 07:06:48.841572 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a573f7e0-ee6d-4847-a778-5f6ef41fd17f","Type":"ContainerStarted","Data":"30d8dd9623e9768efc57f7f70f7831339e77e587c388cf1163641171e80b166d"} Nov 30 07:06:48 crc kubenswrapper[4941]: I1130 07:06:48.847707 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.361117386 podStartE2EDuration="4.847684937s" podCreationTimestamp="2025-11-30 07:06:44 +0000 UTC" firstStartedPulling="2025-11-30 07:06:45.000748164 +0000 UTC m=+1225.768919783" lastFinishedPulling="2025-11-30 07:06:48.487315725 +0000 UTC m=+1229.255487334" observedRunningTime="2025-11-30 07:06:48.847108909 +0000 UTC m=+1229.615280768" watchObservedRunningTime="2025-11-30 07:06:48.847684937 +0000 UTC m=+1229.615856546" Nov 30 07:06:48 crc kubenswrapper[4941]: I1130 07:06:48.848924 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a376fae9-3d2f-4247-b917-0d63e6f4a9da","Type":"ContainerStarted","Data":"68e80ed464e919522f447820599362734c316c8d7a5459adff802d5f94a52e5f"} Nov 30 07:06:48 crc kubenswrapper[4941]: I1130 07:06:48.848980 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a376fae9-3d2f-4247-b917-0d63e6f4a9da","Type":"ContainerStarted","Data":"f3c5cd2caf7497c4b65c0a14258d0f670900d8a52cd79944430769533545db82"} Nov 30 07:06:49 crc kubenswrapper[4941]: I1130 07:06:49.894377 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a573f7e0-ee6d-4847-a778-5f6ef41fd17f","Type":"ContainerStarted","Data":"6a679ded02687521bbab8ba0d141ec44027e549ed85d08f01b4106e0cd6db8b8"} Nov 30 07:06:49 crc kubenswrapper[4941]: I1130 07:06:49.913152 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a376fae9-3d2f-4247-b917-0d63e6f4a9da","Type":"ContainerStarted","Data":"dfb235dd19464a6b133288b3327f7e345c63de23737ba8b6a478ad9216727e2b"} Nov 30 07:06:49 crc kubenswrapper[4941]: I1130 07:06:49.917255 4941 generic.go:334] "Generic (PLEG): container finished" podID="4e6a1813-c919-48c6-939b-3e9c74272123" containerID="4c266c605530c1a658283a22bf5f7bf90bdb75fea88b84b558373796df8d6e99" exitCode=2 Nov 30 07:06:49 crc kubenswrapper[4941]: I1130 07:06:49.917292 4941 generic.go:334] "Generic (PLEG): container finished" podID="4e6a1813-c919-48c6-939b-3e9c74272123" containerID="87933eeb2641aafacad34979626582fb8f19596280eabdd0756a64eec188eac3" exitCode=0 Nov 30 07:06:49 crc kubenswrapper[4941]: I1130 07:06:49.917320 4941 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/ceilometer-0" event={"ID":"4e6a1813-c919-48c6-939b-3e9c74272123","Type":"ContainerDied","Data":"4c266c605530c1a658283a22bf5f7bf90bdb75fea88b84b558373796df8d6e99"} Nov 30 07:06:49 crc kubenswrapper[4941]: I1130 07:06:49.917366 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e6a1813-c919-48c6-939b-3e9c74272123","Type":"ContainerDied","Data":"87933eeb2641aafacad34979626582fb8f19596280eabdd0756a64eec188eac3"} Nov 30 07:06:49 crc kubenswrapper[4941]: I1130 07:06:49.929511 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.929499089 podStartE2EDuration="3.929499089s" podCreationTimestamp="2025-11-30 07:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:06:49.927843289 +0000 UTC m=+1230.696014898" watchObservedRunningTime="2025-11-30 07:06:49.929499089 +0000 UTC m=+1230.697670698" Nov 30 07:06:49 crc kubenswrapper[4941]: I1130 07:06:49.963944 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.963924221 podStartE2EDuration="3.963924221s" podCreationTimestamp="2025-11-30 07:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:06:49.958022611 +0000 UTC m=+1230.726194220" watchObservedRunningTime="2025-11-30 07:06:49.963924221 +0000 UTC m=+1230.732095830" Nov 30 07:06:57 crc kubenswrapper[4941]: I1130 07:06:57.005285 4941 generic.go:334] "Generic (PLEG): container finished" podID="130403d6-7916-4482-8ced-0a334b1ca222" containerID="d60b2e93cd8fe47985ea3c022d6fe900447816293ccb970fad6d8c2750bd7db3" exitCode=0 Nov 30 07:06:57 crc kubenswrapper[4941]: I1130 07:06:57.005368 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-r9r94" event={"ID":"130403d6-7916-4482-8ced-0a334b1ca222","Type":"ContainerDied","Data":"d60b2e93cd8fe47985ea3c022d6fe900447816293ccb970fad6d8c2750bd7db3"} Nov 30 07:06:57 crc kubenswrapper[4941]: I1130 07:06:57.279224 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 30 07:06:57 crc kubenswrapper[4941]: I1130 07:06:57.279270 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 30 07:06:57 crc kubenswrapper[4941]: I1130 07:06:57.313422 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 30 07:06:57 crc kubenswrapper[4941]: I1130 07:06:57.320950 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 30 07:06:57 crc kubenswrapper[4941]: I1130 07:06:57.404846 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 30 07:06:57 crc kubenswrapper[4941]: I1130 07:06:57.409047 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 30 07:06:57 crc kubenswrapper[4941]: I1130 07:06:57.442638 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 30 07:06:57 crc kubenswrapper[4941]: I1130 
07:06:57.451913 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.016905 4941 generic.go:334] "Generic (PLEG): container finished" podID="4e6a1813-c919-48c6-939b-3e9c74272123" containerID="86b36037a0c21984f09821cee0b08b8da2dba44f9bfc363a6be89cfbb3d2d4b9" exitCode=0 Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.017061 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e6a1813-c919-48c6-939b-3e9c74272123","Type":"ContainerDied","Data":"86b36037a0c21984f09821cee0b08b8da2dba44f9bfc363a6be89cfbb3d2d4b9"} Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.017615 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.017661 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.017681 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.017704 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.442956 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.577612 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxcxh\" (UniqueName: \"kubernetes.io/projected/130403d6-7916-4482-8ced-0a334b1ca222-kube-api-access-pxcxh\") pod \"130403d6-7916-4482-8ced-0a334b1ca222\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.577705 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-scripts\") pod \"130403d6-7916-4482-8ced-0a334b1ca222\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.577888 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-config-data\") pod \"130403d6-7916-4482-8ced-0a334b1ca222\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.578028 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-combined-ca-bundle\") pod \"130403d6-7916-4482-8ced-0a334b1ca222\" (UID: \"130403d6-7916-4482-8ced-0a334b1ca222\") " Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.583251 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/130403d6-7916-4482-8ced-0a334b1ca222-kube-api-access-pxcxh" (OuterVolumeSpecName: "kube-api-access-pxcxh") pod "130403d6-7916-4482-8ced-0a334b1ca222" (UID: "130403d6-7916-4482-8ced-0a334b1ca222"). InnerVolumeSpecName "kube-api-access-pxcxh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.583421 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-scripts" (OuterVolumeSpecName: "scripts") pod "130403d6-7916-4482-8ced-0a334b1ca222" (UID: "130403d6-7916-4482-8ced-0a334b1ca222"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.603722 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "130403d6-7916-4482-8ced-0a334b1ca222" (UID: "130403d6-7916-4482-8ced-0a334b1ca222"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.607827 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-config-data" (OuterVolumeSpecName: "config-data") pod "130403d6-7916-4482-8ced-0a334b1ca222" (UID: "130403d6-7916-4482-8ced-0a334b1ca222"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.680547 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.680696 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxcxh\" (UniqueName: \"kubernetes.io/projected/130403d6-7916-4482-8ced-0a334b1ca222-kube-api-access-pxcxh\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.680757 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:58 crc kubenswrapper[4941]: I1130 07:06:58.680818 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/130403d6-7916-4482-8ced-0a334b1ca222-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.030590 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-r9r94" event={"ID":"130403d6-7916-4482-8ced-0a334b1ca222","Type":"ContainerDied","Data":"e89172d8f511caa0e2dd1009b3174ea4b0426b54a9b867b421a6b3e5ad0b962e"} Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.031947 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e89172d8f511caa0e2dd1009b3174ea4b0426b54a9b867b421a6b3e5ad0b962e" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.031974 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-r9r94" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.146952 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 07:06:59 crc kubenswrapper[4941]: E1130 07:06:59.147370 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="130403d6-7916-4482-8ced-0a334b1ca222" containerName="nova-cell0-conductor-db-sync" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.147388 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="130403d6-7916-4482-8ced-0a334b1ca222" containerName="nova-cell0-conductor-db-sync" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.147586 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="130403d6-7916-4482-8ced-0a334b1ca222" containerName="nova-cell0-conductor-db-sync" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.148177 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.157862 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.161615 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-cswn5" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.186443 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.293577 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88971618-54e2-4670-be08-a6ae63ed99df-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"88971618-54e2-4670-be08-a6ae63ed99df\") " pod="openstack/nova-cell0-conductor-0" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.293732 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f26k6\" (UniqueName: \"kubernetes.io/projected/88971618-54e2-4670-be08-a6ae63ed99df-kube-api-access-f26k6\") pod \"nova-cell0-conductor-0\" (UID: \"88971618-54e2-4670-be08-a6ae63ed99df\") " pod="openstack/nova-cell0-conductor-0" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.293762 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88971618-54e2-4670-be08-a6ae63ed99df-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"88971618-54e2-4670-be08-a6ae63ed99df\") " pod="openstack/nova-cell0-conductor-0" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.395945 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f26k6\" (UniqueName: \"kubernetes.io/projected/88971618-54e2-4670-be08-a6ae63ed99df-kube-api-access-f26k6\") pod \"nova-cell0-conductor-0\" (UID: \"88971618-54e2-4670-be08-a6ae63ed99df\") " pod="openstack/nova-cell0-conductor-0" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.396018 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88971618-54e2-4670-be08-a6ae63ed99df-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"88971618-54e2-4670-be08-a6ae63ed99df\") " pod="openstack/nova-cell0-conductor-0" Nov 30 07:06:59 crc kubenswrapper[4941]: 
I1130 07:06:59.396092 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88971618-54e2-4670-be08-a6ae63ed99df-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"88971618-54e2-4670-be08-a6ae63ed99df\") " pod="openstack/nova-cell0-conductor-0" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.417040 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88971618-54e2-4670-be08-a6ae63ed99df-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"88971618-54e2-4670-be08-a6ae63ed99df\") " pod="openstack/nova-cell0-conductor-0" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.417042 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88971618-54e2-4670-be08-a6ae63ed99df-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"88971618-54e2-4670-be08-a6ae63ed99df\") " pod="openstack/nova-cell0-conductor-0" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.422114 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f26k6\" (UniqueName: \"kubernetes.io/projected/88971618-54e2-4670-be08-a6ae63ed99df-kube-api-access-f26k6\") pod \"nova-cell0-conductor-0\" (UID: \"88971618-54e2-4670-be08-a6ae63ed99df\") " pod="openstack/nova-cell0-conductor-0" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.472793 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.939304 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.943898 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 30 07:06:59 crc kubenswrapper[4941]: I1130 07:06:59.951733 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 07:07:00 crc kubenswrapper[4941]: I1130 07:07:00.033917 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 30 07:07:00 crc kubenswrapper[4941]: I1130 07:07:00.033967 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 30 07:07:00 crc kubenswrapper[4941]: I1130 07:07:00.045568 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"88971618-54e2-4670-be08-a6ae63ed99df","Type":"ContainerStarted","Data":"a03be0166325f65a71d81d071981e4f4c659c9f712b8c18b492acf4ba96108ec"} Nov 30 07:07:01 crc kubenswrapper[4941]: I1130 07:07:01.057696 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"88971618-54e2-4670-be08-a6ae63ed99df","Type":"ContainerStarted","Data":"ef324eb6f3967c89fcde7a1338bf072fab857f22a3e354ed3b1cd701d98d5c93"} Nov 30 07:07:02 crc kubenswrapper[4941]: I1130 07:07:02.064939 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 30 07:07:02 crc kubenswrapper[4941]: I1130 07:07:02.978217 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:07:02 crc kubenswrapper[4941]: I1130 07:07:02.978786 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:07:09 crc kubenswrapper[4941]: I1130 07:07:09.518113 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 30 07:07:09 crc kubenswrapper[4941]: I1130 07:07:09.534717 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=10.534702431 podStartE2EDuration="10.534702431s" podCreationTimestamp="2025-11-30 07:06:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:07:01.085850959 +0000 UTC m=+1241.854022568" watchObservedRunningTime="2025-11-30 07:07:09.534702431 +0000 UTC m=+1250.302874040" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.026040 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-6hd2t"] Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.027546 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.030098 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.030134 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.044590 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-6hd2t"] Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.160774 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gg7z8\" (UniqueName: \"kubernetes.io/projected/6f615e12-dbe4-4037-ae70-3fe72dade25a-kube-api-access-gg7z8\") pod \"nova-cell0-cell-mapping-6hd2t\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.160829 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-scripts\") pod \"nova-cell0-cell-mapping-6hd2t\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.160879 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-config-data\") pod \"nova-cell0-cell-mapping-6hd2t\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.160996 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6hd2t\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.181861 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.183571 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.188354 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.198198 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.260788 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.263302 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fth5\" (UniqueName: \"kubernetes.io/projected/5ab7bd0a-4dad-414f-b2da-03b57b471309-kube-api-access-2fth5\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ab7bd0a-4dad-414f-b2da-03b57b471309\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.263372 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6hd2t\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.263444 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ab7bd0a-4dad-414f-b2da-03b57b471309-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ab7bd0a-4dad-414f-b2da-03b57b471309\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.263475 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gg7z8\" (UniqueName: \"kubernetes.io/projected/6f615e12-dbe4-4037-ae70-3fe72dade25a-kube-api-access-gg7z8\") pod \"nova-cell0-cell-mapping-6hd2t\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.263498 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-scripts\") pod \"nova-cell0-cell-mapping-6hd2t\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.263524 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-config-data\") pod \"nova-cell0-cell-mapping-6hd2t\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.263549 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ab7bd0a-4dad-414f-b2da-03b57b471309-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ab7bd0a-4dad-414f-b2da-03b57b471309\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.264802 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.269868 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-scripts\") pod \"nova-cell0-cell-mapping-6hd2t\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.270369 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.289962 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.291860 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6hd2t\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.298110 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-config-data\") pod \"nova-cell0-cell-mapping-6hd2t\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.328916 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gg7z8\" (UniqueName: \"kubernetes.io/projected/6f615e12-dbe4-4037-ae70-3fe72dade25a-kube-api-access-gg7z8\") pod \"nova-cell0-cell-mapping-6hd2t\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.364903 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ab7bd0a-4dad-414f-b2da-03b57b471309-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ab7bd0a-4dad-414f-b2da-03b57b471309\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.364973 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fth5\" (UniqueName: \"kubernetes.io/projected/5ab7bd0a-4dad-414f-b2da-03b57b471309-kube-api-access-2fth5\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ab7bd0a-4dad-414f-b2da-03b57b471309\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.365037 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d72aaf76-03a2-461b-ac6c-2847186f2359-logs\") pod \"nova-metadata-0\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.365085 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-lx7nd\" (UniqueName: \"kubernetes.io/projected/d72aaf76-03a2-461b-ac6c-2847186f2359-kube-api-access-lx7nd\") pod \"nova-metadata-0\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.365104 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d72aaf76-03a2-461b-ac6c-2847186f2359-config-data\") pod \"nova-metadata-0\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.365121 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ab7bd0a-4dad-414f-b2da-03b57b471309-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ab7bd0a-4dad-414f-b2da-03b57b471309\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.365138 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d72aaf76-03a2-461b-ac6c-2847186f2359-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.367437 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.370826 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ab7bd0a-4dad-414f-b2da-03b57b471309-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ab7bd0a-4dad-414f-b2da-03b57b471309\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.373618 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ab7bd0a-4dad-414f-b2da-03b57b471309-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ab7bd0a-4dad-414f-b2da-03b57b471309\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.458831 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fth5\" (UniqueName: \"kubernetes.io/projected/5ab7bd0a-4dad-414f-b2da-03b57b471309-kube-api-access-2fth5\") pod \"nova-cell1-novncproxy-0\" (UID: \"5ab7bd0a-4dad-414f-b2da-03b57b471309\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.466420 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d72aaf76-03a2-461b-ac6c-2847186f2359-logs\") pod \"nova-metadata-0\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.466485 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx7nd\" (UniqueName: \"kubernetes.io/projected/d72aaf76-03a2-461b-ac6c-2847186f2359-kube-api-access-lx7nd\") pod \"nova-metadata-0\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.466504 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/d72aaf76-03a2-461b-ac6c-2847186f2359-config-data\") pod \"nova-metadata-0\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.466522 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d72aaf76-03a2-461b-ac6c-2847186f2359-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.468938 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d72aaf76-03a2-461b-ac6c-2847186f2359-logs\") pod \"nova-metadata-0\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.495998 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d72aaf76-03a2-461b-ac6c-2847186f2359-config-data\") pod \"nova-metadata-0\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.498997 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d72aaf76-03a2-461b-ac6c-2847186f2359-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.507739 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.527013 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx7nd\" (UniqueName: \"kubernetes.io/projected/d72aaf76-03a2-461b-ac6c-2847186f2359-kube-api-access-lx7nd\") pod \"nova-metadata-0\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.570134 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.571608 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.585218 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.606539 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.646700 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.743383 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5594d9b959-7tm62"] Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.745225 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.774240 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv6sm\" (UniqueName: \"kubernetes.io/projected/3adcdb0b-5b85-4649-bd98-04e946483dc5-kube-api-access-nv6sm\") pod \"nova-scheduler-0\" (UID: \"3adcdb0b-5b85-4649-bd98-04e946483dc5\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.774471 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3adcdb0b-5b85-4649-bd98-04e946483dc5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3adcdb0b-5b85-4649-bd98-04e946483dc5\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.774620 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3adcdb0b-5b85-4649-bd98-04e946483dc5-config-data\") pod \"nova-scheduler-0\" (UID: \"3adcdb0b-5b85-4649-bd98-04e946483dc5\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.790595 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5594d9b959-7tm62"] Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.816282 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.837706 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.851184 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.894063 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.906047 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-config\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.906148 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-ovsdbserver-nb\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.906290 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-ovsdbserver-sb\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.906863 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv6sm\" (UniqueName: \"kubernetes.io/projected/3adcdb0b-5b85-4649-bd98-04e946483dc5-kube-api-access-nv6sm\") pod \"nova-scheduler-0\" (UID: 
\"3adcdb0b-5b85-4649-bd98-04e946483dc5\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.907155 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3adcdb0b-5b85-4649-bd98-04e946483dc5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3adcdb0b-5b85-4649-bd98-04e946483dc5\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.907200 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-dns-svc\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.907943 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3adcdb0b-5b85-4649-bd98-04e946483dc5-config-data\") pod \"nova-scheduler-0\" (UID: \"3adcdb0b-5b85-4649-bd98-04e946483dc5\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.908024 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-dns-swift-storage-0\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.908106 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bxjb\" (UniqueName: \"kubernetes.io/projected/8e818a15-1640-4100-b312-18c88fff65b0-kube-api-access-5bxjb\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.914136 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3adcdb0b-5b85-4649-bd98-04e946483dc5-config-data\") pod \"nova-scheduler-0\" (UID: \"3adcdb0b-5b85-4649-bd98-04e946483dc5\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.915035 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3adcdb0b-5b85-4649-bd98-04e946483dc5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3adcdb0b-5b85-4649-bd98-04e946483dc5\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.928880 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv6sm\" (UniqueName: \"kubernetes.io/projected/3adcdb0b-5b85-4649-bd98-04e946483dc5-kube-api-access-nv6sm\") pod \"nova-scheduler-0\" (UID: \"3adcdb0b-5b85-4649-bd98-04e946483dc5\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:10 crc kubenswrapper[4941]: I1130 07:07:10.976600 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.010644 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6403f017-3e24-4451-85c1-52fd4eef0ed7-config-data\") pod \"nova-api-0\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.010762 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-dns-swift-storage-0\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.010800 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6403f017-3e24-4451-85c1-52fd4eef0ed7-logs\") pod \"nova-api-0\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.010842 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bxjb\" (UniqueName: \"kubernetes.io/projected/8e818a15-1640-4100-b312-18c88fff65b0-kube-api-access-5bxjb\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.010888 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6403f017-3e24-4451-85c1-52fd4eef0ed7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.010938 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jpw4\" (UniqueName: \"kubernetes.io/projected/6403f017-3e24-4451-85c1-52fd4eef0ed7-kube-api-access-7jpw4\") pod \"nova-api-0\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.010964 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-config\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.011028 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-ovsdbserver-nb\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.011057 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-ovsdbserver-sb\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.011113 
4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-dns-svc\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.014798 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-ovsdbserver-sb\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.015468 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-config\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.015967 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-dns-swift-storage-0\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.016090 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-ovsdbserver-nb\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.016810 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-dns-svc\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.035500 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bxjb\" (UniqueName: \"kubernetes.io/projected/8e818a15-1640-4100-b312-18c88fff65b0-kube-api-access-5bxjb\") pod \"dnsmasq-dns-5594d9b959-7tm62\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") " pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.078745 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.112454 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6403f017-3e24-4451-85c1-52fd4eef0ed7-config-data\") pod \"nova-api-0\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.112539 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6403f017-3e24-4451-85c1-52fd4eef0ed7-logs\") pod \"nova-api-0\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.112565 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6403f017-3e24-4451-85c1-52fd4eef0ed7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.112589 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jpw4\" (UniqueName: \"kubernetes.io/projected/6403f017-3e24-4451-85c1-52fd4eef0ed7-kube-api-access-7jpw4\") pod \"nova-api-0\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.113797 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6403f017-3e24-4451-85c1-52fd4eef0ed7-logs\") pod \"nova-api-0\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.117723 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6403f017-3e24-4451-85c1-52fd4eef0ed7-config-data\") pod \"nova-api-0\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.119088 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6403f017-3e24-4451-85c1-52fd4eef0ed7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.138728 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jpw4\" (UniqueName: \"kubernetes.io/projected/6403f017-3e24-4451-85c1-52fd4eef0ed7-kube-api-access-7jpw4\") pod \"nova-api-0\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.172114 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.252697 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-6hd2t"] Nov 30 07:07:11 crc kubenswrapper[4941]: W1130 07:07:11.264057 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f615e12_dbe4_4037_ae70_3fe72dade25a.slice/crio-016822a10da3106320ce671a136714f53ce53ee51f54be91bcb0c92e005d6c6e WatchSource:0}: Error finding container 016822a10da3106320ce671a136714f53ce53ee51f54be91bcb0c92e005d6c6e: Status 404 returned error can't find the container with id 016822a10da3106320ce671a136714f53ce53ee51f54be91bcb0c92e005d6c6e Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.269157 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.397670 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jn7qf"] Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.399278 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.404684 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.404967 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.409511 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.419424 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jn7qf"] Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.493760 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:07:11 crc kubenswrapper[4941]: W1130 07:07:11.498486 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3adcdb0b_5b85_4649_bd98_04e946483dc5.slice/crio-0cc9229c8179385452483d6777dfc5374b7176ae81d8c6395738c3292d1b8767 WatchSource:0}: Error finding container 0cc9229c8179385452483d6777dfc5374b7176ae81d8c6395738c3292d1b8767: Status 404 returned error can't find the container with id 0cc9229c8179385452483d6777dfc5374b7176ae81d8c6395738c3292d1b8767 Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.533797 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-config-data\") pod \"nova-cell1-conductor-db-sync-jn7qf\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.533889 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-scripts\") pod \"nova-cell1-conductor-db-sync-jn7qf\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.533919 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdc5q\" (UniqueName: \"kubernetes.io/projected/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-kube-api-access-cdc5q\") pod \"nova-cell1-conductor-db-sync-jn7qf\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.533938 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-jn7qf\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.634671 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-scripts\") pod \"nova-cell1-conductor-db-sync-jn7qf\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.634719 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdc5q\" (UniqueName: \"kubernetes.io/projected/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-kube-api-access-cdc5q\") pod \"nova-cell1-conductor-db-sync-jn7qf\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.634748 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-jn7qf\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.634836 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-config-data\") pod \"nova-cell1-conductor-db-sync-jn7qf\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.641781 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-scripts\") pod \"nova-cell1-conductor-db-sync-jn7qf\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.641900 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-jn7qf\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.641960 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-config-data\") pod \"nova-cell1-conductor-db-sync-jn7qf\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc 
kubenswrapper[4941]: I1130 07:07:11.656845 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdc5q\" (UniqueName: \"kubernetes.io/projected/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-kube-api-access-cdc5q\") pod \"nova-cell1-conductor-db-sync-jn7qf\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.750167 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5594d9b959-7tm62"] Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.763913 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:11 crc kubenswrapper[4941]: I1130 07:07:11.871057 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:12 crc kubenswrapper[4941]: I1130 07:07:12.203149 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3adcdb0b-5b85-4649-bd98-04e946483dc5","Type":"ContainerStarted","Data":"0cc9229c8179385452483d6777dfc5374b7176ae81d8c6395738c3292d1b8767"} Nov 30 07:07:12 crc kubenswrapper[4941]: I1130 07:07:12.205882 4941 generic.go:334] "Generic (PLEG): container finished" podID="8e818a15-1640-4100-b312-18c88fff65b0" containerID="b2933435264d25c63a5a8c4670c962c223ab7283cd51671d9043eb973329c816" exitCode=0 Nov 30 07:07:12 crc kubenswrapper[4941]: I1130 07:07:12.205956 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5594d9b959-7tm62" event={"ID":"8e818a15-1640-4100-b312-18c88fff65b0","Type":"ContainerDied","Data":"b2933435264d25c63a5a8c4670c962c223ab7283cd51671d9043eb973329c816"} Nov 30 07:07:12 crc kubenswrapper[4941]: I1130 07:07:12.205974 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5594d9b959-7tm62" event={"ID":"8e818a15-1640-4100-b312-18c88fff65b0","Type":"ContainerStarted","Data":"489f7c34b5660e80045220ab667e006f0757dea92c93ec674650c009fd0b9f13"} Nov 30 07:07:12 crc kubenswrapper[4941]: I1130 07:07:12.210085 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6403f017-3e24-4451-85c1-52fd4eef0ed7","Type":"ContainerStarted","Data":"4d982826139dba5d3606cdcbd2b8346337a93f55f6b580174449cc4fde9c9e4c"} Nov 30 07:07:12 crc kubenswrapper[4941]: I1130 07:07:12.213572 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d72aaf76-03a2-461b-ac6c-2847186f2359","Type":"ContainerStarted","Data":"72d7d56608fe5ece349497b673e8d2f6c61bc230c43cadfdc78b62e7f4c15d2f"} Nov 30 07:07:12 crc kubenswrapper[4941]: I1130 07:07:12.216150 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5ab7bd0a-4dad-414f-b2da-03b57b471309","Type":"ContainerStarted","Data":"1d51be00db5d274d0cbb067058f3ff8f9d655c227b9d284110de41b192681229"} Nov 30 07:07:12 crc kubenswrapper[4941]: I1130 07:07:12.221219 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6hd2t" event={"ID":"6f615e12-dbe4-4037-ae70-3fe72dade25a","Type":"ContainerStarted","Data":"0c5d9a8e5bf04936fcd7fef9a3fbc0f88f628cb0ea4146cdb0d370970a42b1f4"} Nov 30 07:07:12 crc kubenswrapper[4941]: I1130 07:07:12.221316 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6hd2t" 
event={"ID":"6f615e12-dbe4-4037-ae70-3fe72dade25a","Type":"ContainerStarted","Data":"016822a10da3106320ce671a136714f53ce53ee51f54be91bcb0c92e005d6c6e"} Nov 30 07:07:12 crc kubenswrapper[4941]: I1130 07:07:12.252167 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-6hd2t" podStartSLOduration=2.252152244 podStartE2EDuration="2.252152244s" podCreationTimestamp="2025-11-30 07:07:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:07:12.250652417 +0000 UTC m=+1253.018824026" watchObservedRunningTime="2025-11-30 07:07:12.252152244 +0000 UTC m=+1253.020323853" Nov 30 07:07:12 crc kubenswrapper[4941]: I1130 07:07:12.382109 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jn7qf"] Nov 30 07:07:12 crc kubenswrapper[4941]: W1130 07:07:12.391357 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod940ccc92_8cc8_4e78_b862_a7f5fa3d9288.slice/crio-b54942982f400cab903eb1007a6f5518f8406a93b79413f9752233a095f779ff WatchSource:0}: Error finding container b54942982f400cab903eb1007a6f5518f8406a93b79413f9752233a095f779ff: Status 404 returned error can't find the container with id b54942982f400cab903eb1007a6f5518f8406a93b79413f9752233a095f779ff Nov 30 07:07:13 crc kubenswrapper[4941]: I1130 07:07:13.248527 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5594d9b959-7tm62" event={"ID":"8e818a15-1640-4100-b312-18c88fff65b0","Type":"ContainerStarted","Data":"23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b"} Nov 30 07:07:13 crc kubenswrapper[4941]: I1130 07:07:13.248881 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:13 crc kubenswrapper[4941]: I1130 07:07:13.253429 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jn7qf" event={"ID":"940ccc92-8cc8-4e78-b862-a7f5fa3d9288","Type":"ContainerStarted","Data":"e3ee661e169cba152a8653fbfa50300784a65ee412f2fc53dfa56f11c302721e"} Nov 30 07:07:13 crc kubenswrapper[4941]: I1130 07:07:13.253471 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jn7qf" event={"ID":"940ccc92-8cc8-4e78-b862-a7f5fa3d9288","Type":"ContainerStarted","Data":"b54942982f400cab903eb1007a6f5518f8406a93b79413f9752233a095f779ff"} Nov 30 07:07:13 crc kubenswrapper[4941]: I1130 07:07:13.284314 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5594d9b959-7tm62" podStartSLOduration=3.284289794 podStartE2EDuration="3.284289794s" podCreationTimestamp="2025-11-30 07:07:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:07:13.273532975 +0000 UTC m=+1254.041704584" watchObservedRunningTime="2025-11-30 07:07:13.284289794 +0000 UTC m=+1254.052461403" Nov 30 07:07:13 crc kubenswrapper[4941]: I1130 07:07:13.302054 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-jn7qf" podStartSLOduration=2.302036316 podStartE2EDuration="2.302036316s" podCreationTimestamp="2025-11-30 07:07:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-11-30 07:07:13.293819824 +0000 UTC m=+1254.061991433" watchObservedRunningTime="2025-11-30 07:07:13.302036316 +0000 UTC m=+1254.070207925" Nov 30 07:07:14 crc kubenswrapper[4941]: I1130 07:07:14.435829 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Nov 30 07:07:14 crc kubenswrapper[4941]: I1130 07:07:14.967544 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.028221 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.275636 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5ab7bd0a-4dad-414f-b2da-03b57b471309","Type":"ContainerStarted","Data":"df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c"} Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.278489 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3adcdb0b-5b85-4649-bd98-04e946483dc5","Type":"ContainerStarted","Data":"b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5"} Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.281583 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6403f017-3e24-4451-85c1-52fd4eef0ed7","Type":"ContainerStarted","Data":"8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72"} Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.281621 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6403f017-3e24-4451-85c1-52fd4eef0ed7","Type":"ContainerStarted","Data":"ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9"} Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.286053 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d72aaf76-03a2-461b-ac6c-2847186f2359","Type":"ContainerStarted","Data":"cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad"} Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.286097 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d72aaf76-03a2-461b-ac6c-2847186f2359","Type":"ContainerStarted","Data":"80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a"} Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.286375 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d72aaf76-03a2-461b-ac6c-2847186f2359" containerName="nova-metadata-metadata" containerID="cri-o://cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad" gracePeriod=30 Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.286374 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d72aaf76-03a2-461b-ac6c-2847186f2359" containerName="nova-metadata-log" containerID="cri-o://80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a" gracePeriod=30 Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.296571 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.29363001 podStartE2EDuration="5.296551645s" 
podCreationTimestamp="2025-11-30 07:07:10 +0000 UTC" firstStartedPulling="2025-11-30 07:07:11.407674297 +0000 UTC m=+1252.175845906" lastFinishedPulling="2025-11-30 07:07:14.410595912 +0000 UTC m=+1255.178767541" observedRunningTime="2025-11-30 07:07:15.295089291 +0000 UTC m=+1256.063260900" watchObservedRunningTime="2025-11-30 07:07:15.296551645 +0000 UTC m=+1256.064723254" Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.328750 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.173277624 podStartE2EDuration="5.328732909s" podCreationTimestamp="2025-11-30 07:07:10 +0000 UTC" firstStartedPulling="2025-11-30 07:07:11.261556382 +0000 UTC m=+1252.029727991" lastFinishedPulling="2025-11-30 07:07:14.417011667 +0000 UTC m=+1255.185183276" observedRunningTime="2025-11-30 07:07:15.323702864 +0000 UTC m=+1256.091874473" watchObservedRunningTime="2025-11-30 07:07:15.328732909 +0000 UTC m=+1256.096904518" Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.367286 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.457970773 podStartE2EDuration="5.367268176s" podCreationTimestamp="2025-11-30 07:07:10 +0000 UTC" firstStartedPulling="2025-11-30 07:07:11.500305998 +0000 UTC m=+1252.268477607" lastFinishedPulling="2025-11-30 07:07:14.409603401 +0000 UTC m=+1255.177775010" observedRunningTime="2025-11-30 07:07:15.364073639 +0000 UTC m=+1256.132245258" watchObservedRunningTime="2025-11-30 07:07:15.367268176 +0000 UTC m=+1256.135439785" Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.368313 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.723528568 podStartE2EDuration="5.368307547s" podCreationTimestamp="2025-11-30 07:07:10 +0000 UTC" firstStartedPulling="2025-11-30 07:07:11.767535995 +0000 UTC m=+1252.535707624" lastFinishedPulling="2025-11-30 07:07:14.412314994 +0000 UTC m=+1255.180486603" observedRunningTime="2025-11-30 07:07:15.350147983 +0000 UTC m=+1256.118319592" watchObservedRunningTime="2025-11-30 07:07:15.368307547 +0000 UTC m=+1256.136479156" Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.508362 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.607145 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.607211 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.894446 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.959897 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lx7nd\" (UniqueName: \"kubernetes.io/projected/d72aaf76-03a2-461b-ac6c-2847186f2359-kube-api-access-lx7nd\") pod \"d72aaf76-03a2-461b-ac6c-2847186f2359\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.961493 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d72aaf76-03a2-461b-ac6c-2847186f2359-combined-ca-bundle\") pod \"d72aaf76-03a2-461b-ac6c-2847186f2359\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.961616 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d72aaf76-03a2-461b-ac6c-2847186f2359-logs\") pod \"d72aaf76-03a2-461b-ac6c-2847186f2359\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.961716 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d72aaf76-03a2-461b-ac6c-2847186f2359-config-data\") pod \"d72aaf76-03a2-461b-ac6c-2847186f2359\" (UID: \"d72aaf76-03a2-461b-ac6c-2847186f2359\") " Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.964814 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d72aaf76-03a2-461b-ac6c-2847186f2359-logs" (OuterVolumeSpecName: "logs") pod "d72aaf76-03a2-461b-ac6c-2847186f2359" (UID: "d72aaf76-03a2-461b-ac6c-2847186f2359"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.977890 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 30 07:07:15 crc kubenswrapper[4941]: I1130 07:07:15.988662 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d72aaf76-03a2-461b-ac6c-2847186f2359-kube-api-access-lx7nd" (OuterVolumeSpecName: "kube-api-access-lx7nd") pod "d72aaf76-03a2-461b-ac6c-2847186f2359" (UID: "d72aaf76-03a2-461b-ac6c-2847186f2359"). InnerVolumeSpecName "kube-api-access-lx7nd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.003219 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d72aaf76-03a2-461b-ac6c-2847186f2359-config-data" (OuterVolumeSpecName: "config-data") pod "d72aaf76-03a2-461b-ac6c-2847186f2359" (UID: "d72aaf76-03a2-461b-ac6c-2847186f2359"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.005955 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d72aaf76-03a2-461b-ac6c-2847186f2359-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d72aaf76-03a2-461b-ac6c-2847186f2359" (UID: "d72aaf76-03a2-461b-ac6c-2847186f2359"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.065738 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lx7nd\" (UniqueName: \"kubernetes.io/projected/d72aaf76-03a2-461b-ac6c-2847186f2359-kube-api-access-lx7nd\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.065769 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d72aaf76-03a2-461b-ac6c-2847186f2359-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.065778 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d72aaf76-03a2-461b-ac6c-2847186f2359-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.065786 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d72aaf76-03a2-461b-ac6c-2847186f2359-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.297648 4941 generic.go:334] "Generic (PLEG): container finished" podID="d72aaf76-03a2-461b-ac6c-2847186f2359" containerID="cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad" exitCode=0 Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.297696 4941 generic.go:334] "Generic (PLEG): container finished" podID="d72aaf76-03a2-461b-ac6c-2847186f2359" containerID="80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a" exitCode=143 Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.298066 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d72aaf76-03a2-461b-ac6c-2847186f2359","Type":"ContainerDied","Data":"cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad"} Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.298384 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d72aaf76-03a2-461b-ac6c-2847186f2359","Type":"ContainerDied","Data":"80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a"} Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.298490 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d72aaf76-03a2-461b-ac6c-2847186f2359","Type":"ContainerDied","Data":"72d7d56608fe5ece349497b673e8d2f6c61bc230c43cadfdc78b62e7f4c15d2f"} Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.298612 4941 scope.go:117] "RemoveContainer" containerID="cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.298217 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="5ab7bd0a-4dad-414f-b2da-03b57b471309" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c" gracePeriod=30 Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.298886 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.360082 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.379720 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.393990 4941 scope.go:117] "RemoveContainer" containerID="80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.397303 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:16 crc kubenswrapper[4941]: E1130 07:07:16.397909 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d72aaf76-03a2-461b-ac6c-2847186f2359" containerName="nova-metadata-metadata" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.397932 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d72aaf76-03a2-461b-ac6c-2847186f2359" containerName="nova-metadata-metadata" Nov 30 07:07:16 crc kubenswrapper[4941]: E1130 07:07:16.397944 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d72aaf76-03a2-461b-ac6c-2847186f2359" containerName="nova-metadata-log" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.397952 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d72aaf76-03a2-461b-ac6c-2847186f2359" containerName="nova-metadata-log" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.398152 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d72aaf76-03a2-461b-ac6c-2847186f2359" containerName="nova-metadata-metadata" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.398182 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d72aaf76-03a2-461b-ac6c-2847186f2359" containerName="nova-metadata-log" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.399465 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.401626 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.401895 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.412113 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.475855 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmptw\" (UniqueName: \"kubernetes.io/projected/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-kube-api-access-zmptw\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.475914 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.476132 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-config-data\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.476286 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-logs\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.476363 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.491746 4941 scope.go:117] "RemoveContainer" containerID="cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad" Nov 30 07:07:16 crc kubenswrapper[4941]: E1130 07:07:16.492263 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad\": container with ID starting with cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad not found: ID does not exist" containerID="cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.492310 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad"} err="failed to get container status \"cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad\": rpc error: code = NotFound desc = could not find container 
\"cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad\": container with ID starting with cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad not found: ID does not exist" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.492366 4941 scope.go:117] "RemoveContainer" containerID="80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a" Nov 30 07:07:16 crc kubenswrapper[4941]: E1130 07:07:16.492755 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a\": container with ID starting with 80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a not found: ID does not exist" containerID="80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.492799 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a"} err="failed to get container status \"80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a\": rpc error: code = NotFound desc = could not find container \"80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a\": container with ID starting with 80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a not found: ID does not exist" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.492835 4941 scope.go:117] "RemoveContainer" containerID="cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.493200 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad"} err="failed to get container status \"cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad\": rpc error: code = NotFound desc = could not find container \"cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad\": container with ID starting with cfc2ee95b20cf504411643c3fd0308520153fe772892e7125d36b420792fdcad not found: ID does not exist" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.493242 4941 scope.go:117] "RemoveContainer" containerID="80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.493538 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a"} err="failed to get container status \"80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a\": rpc error: code = NotFound desc = could not find container \"80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a\": container with ID starting with 80cfbc37d455d2f238a4572bef931446dc673c54a5a227ff6da8c242abe05f6a not found: ID does not exist" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.579264 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.581480 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmptw\" (UniqueName: 
\"kubernetes.io/projected/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-kube-api-access-zmptw\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.581619 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.581750 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-config-data\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.581923 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-logs\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.582500 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-logs\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.584490 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.585909 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-config-data\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.587241 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.601159 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmptw\" (UniqueName: \"kubernetes.io/projected/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-kube-api-access-zmptw\") pod \"nova-metadata-0\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " pod="openstack/nova-metadata-0" Nov 30 07:07:16 crc kubenswrapper[4941]: I1130 07:07:16.787927 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:07:17 crc kubenswrapper[4941]: I1130 07:07:17.287748 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:17 crc kubenswrapper[4941]: I1130 07:07:17.311772 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"612f9923-9ba7-4e64-bea5-03ad6ca3b02a","Type":"ContainerStarted","Data":"cf4cb6d5cecbf21c3fdfa6f58c219216a74695a91c77496d1441847d5790578c"} Nov 30 07:07:17 crc kubenswrapper[4941]: I1130 07:07:17.533001 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d72aaf76-03a2-461b-ac6c-2847186f2359" path="/var/lib/kubelet/pods/d72aaf76-03a2-461b-ac6c-2847186f2359/volumes" Nov 30 07:07:18 crc kubenswrapper[4941]: I1130 07:07:18.327903 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"612f9923-9ba7-4e64-bea5-03ad6ca3b02a","Type":"ContainerStarted","Data":"d28e869266c6eb55bad476a1e7436f7422a91d87b73d40a6b1d687db7e9fd376"} Nov 30 07:07:18 crc kubenswrapper[4941]: I1130 07:07:18.328320 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"612f9923-9ba7-4e64-bea5-03ad6ca3b02a","Type":"ContainerStarted","Data":"cd1227b504de18f063c685cecbf67d974007f8c494ea1230fae59d6072abbaa1"} Nov 30 07:07:18 crc kubenswrapper[4941]: I1130 07:07:18.370003 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.369930982 podStartE2EDuration="2.369930982s" podCreationTimestamp="2025-11-30 07:07:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:07:18.351056376 +0000 UTC m=+1259.119227985" watchObservedRunningTime="2025-11-30 07:07:18.369930982 +0000 UTC m=+1259.138102631" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.342409 4941 generic.go:334] "Generic (PLEG): container finished" podID="4e6a1813-c919-48c6-939b-3e9c74272123" containerID="fff7bdc943505c156c378414fdd2945d48a51f808fe0d3f663c154c310b94566" exitCode=137 Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.342896 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e6a1813-c919-48c6-939b-3e9c74272123","Type":"ContainerDied","Data":"fff7bdc943505c156c378414fdd2945d48a51f808fe0d3f663c154c310b94566"} Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.367085 4941 generic.go:334] "Generic (PLEG): container finished" podID="6f615e12-dbe4-4037-ae70-3fe72dade25a" containerID="0c5d9a8e5bf04936fcd7fef9a3fbc0f88f628cb0ea4146cdb0d370970a42b1f4" exitCode=0 Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.368062 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6hd2t" event={"ID":"6f615e12-dbe4-4037-ae70-3fe72dade25a","Type":"ContainerDied","Data":"0c5d9a8e5bf04936fcd7fef9a3fbc0f88f628cb0ea4146cdb0d370970a42b1f4"} Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.546290 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.669123 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-scripts\") pod \"4e6a1813-c919-48c6-939b-3e9c74272123\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.669348 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-config-data\") pod \"4e6a1813-c919-48c6-939b-3e9c74272123\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.669479 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bd7gn\" (UniqueName: \"kubernetes.io/projected/4e6a1813-c919-48c6-939b-3e9c74272123-kube-api-access-bd7gn\") pod \"4e6a1813-c919-48c6-939b-3e9c74272123\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.669721 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-sg-core-conf-yaml\") pod \"4e6a1813-c919-48c6-939b-3e9c74272123\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.669772 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e6a1813-c919-48c6-939b-3e9c74272123-run-httpd\") pod \"4e6a1813-c919-48c6-939b-3e9c74272123\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.669853 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-combined-ca-bundle\") pod \"4e6a1813-c919-48c6-939b-3e9c74272123\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.669944 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e6a1813-c919-48c6-939b-3e9c74272123-log-httpd\") pod \"4e6a1813-c919-48c6-939b-3e9c74272123\" (UID: \"4e6a1813-c919-48c6-939b-3e9c74272123\") " Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.670446 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e6a1813-c919-48c6-939b-3e9c74272123-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4e6a1813-c919-48c6-939b-3e9c74272123" (UID: "4e6a1813-c919-48c6-939b-3e9c74272123"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.670567 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e6a1813-c919-48c6-939b-3e9c74272123-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4e6a1813-c919-48c6-939b-3e9c74272123" (UID: "4e6a1813-c919-48c6-939b-3e9c74272123"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.671400 4941 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e6a1813-c919-48c6-939b-3e9c74272123-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.671429 4941 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4e6a1813-c919-48c6-939b-3e9c74272123-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.680247 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e6a1813-c919-48c6-939b-3e9c74272123-kube-api-access-bd7gn" (OuterVolumeSpecName: "kube-api-access-bd7gn") pod "4e6a1813-c919-48c6-939b-3e9c74272123" (UID: "4e6a1813-c919-48c6-939b-3e9c74272123"). InnerVolumeSpecName "kube-api-access-bd7gn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.680472 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-scripts" (OuterVolumeSpecName: "scripts") pod "4e6a1813-c919-48c6-939b-3e9c74272123" (UID: "4e6a1813-c919-48c6-939b-3e9c74272123"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.728787 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4e6a1813-c919-48c6-939b-3e9c74272123" (UID: "4e6a1813-c919-48c6-939b-3e9c74272123"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.765638 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e6a1813-c919-48c6-939b-3e9c74272123" (UID: "4e6a1813-c919-48c6-939b-3e9c74272123"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.774208 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.774489 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bd7gn\" (UniqueName: \"kubernetes.io/projected/4e6a1813-c919-48c6-939b-3e9c74272123-kube-api-access-bd7gn\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.774586 4941 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.774664 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.794810 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-config-data" (OuterVolumeSpecName: "config-data") pod "4e6a1813-c919-48c6-939b-3e9c74272123" (UID: "4e6a1813-c919-48c6-939b-3e9c74272123"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:19 crc kubenswrapper[4941]: I1130 07:07:19.876661 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e6a1813-c919-48c6-939b-3e9c74272123-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.379697 4941 generic.go:334] "Generic (PLEG): container finished" podID="940ccc92-8cc8-4e78-b862-a7f5fa3d9288" containerID="e3ee661e169cba152a8653fbfa50300784a65ee412f2fc53dfa56f11c302721e" exitCode=0 Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.379770 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jn7qf" event={"ID":"940ccc92-8cc8-4e78-b862-a7f5fa3d9288","Type":"ContainerDied","Data":"e3ee661e169cba152a8653fbfa50300784a65ee412f2fc53dfa56f11c302721e"} Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.383037 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.383040 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4e6a1813-c919-48c6-939b-3e9c74272123","Type":"ContainerDied","Data":"6832a902a128e4428d353f08cb6478b4069495582ca9727e1e7400cfbb67e0f3"} Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.383093 4941 scope.go:117] "RemoveContainer" containerID="fff7bdc943505c156c378414fdd2945d48a51f808fe0d3f663c154c310b94566" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.439834 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.443269 4941 scope.go:117] "RemoveContainer" containerID="4c266c605530c1a658283a22bf5f7bf90bdb75fea88b84b558373796df8d6e99" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.449465 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.482163 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:07:20 crc kubenswrapper[4941]: E1130 07:07:20.482746 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="ceilometer-notification-agent" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.482782 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="ceilometer-notification-agent" Nov 30 07:07:20 crc kubenswrapper[4941]: E1130 07:07:20.482797 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="ceilometer-central-agent" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.482808 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="ceilometer-central-agent" Nov 30 07:07:20 crc kubenswrapper[4941]: E1130 07:07:20.482853 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="proxy-httpd" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.482861 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="proxy-httpd" Nov 30 07:07:20 crc kubenswrapper[4941]: E1130 07:07:20.482876 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="sg-core" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.482883 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="sg-core" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.483098 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="sg-core" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.483130 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="ceilometer-notification-agent" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.483141 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" containerName="ceilometer-central-agent" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.483158 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" 
containerName="proxy-httpd" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.487825 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.491096 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.491292 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.501079 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.521700 4941 scope.go:117] "RemoveContainer" containerID="87933eeb2641aafacad34979626582fb8f19596280eabdd0756a64eec188eac3" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.563024 4941 scope.go:117] "RemoveContainer" containerID="86b36037a0c21984f09821cee0b08b8da2dba44f9bfc363a6be89cfbb3d2d4b9" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.591274 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2674708b-b628-41e1-b2d0-eab170a354dc-run-httpd\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.591645 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-config-data\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.591786 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-scripts\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.591902 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.592126 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xcnr\" (UniqueName: \"kubernetes.io/projected/2674708b-b628-41e1-b2d0-eab170a354dc-kube-api-access-8xcnr\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.592205 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.593064 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2674708b-b628-41e1-b2d0-eab170a354dc-log-httpd\") pod 
\"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.696091 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.696204 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xcnr\" (UniqueName: \"kubernetes.io/projected/2674708b-b628-41e1-b2d0-eab170a354dc-kube-api-access-8xcnr\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.696245 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.696286 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2674708b-b628-41e1-b2d0-eab170a354dc-log-httpd\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.696344 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2674708b-b628-41e1-b2d0-eab170a354dc-run-httpd\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.696384 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-config-data\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.696415 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-scripts\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.697289 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2674708b-b628-41e1-b2d0-eab170a354dc-run-httpd\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.708358 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-scripts\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.708518 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.708681 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.709370 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2674708b-b628-41e1-b2d0-eab170a354dc-log-httpd\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.714519 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-config-data\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.722313 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xcnr\" (UniqueName: \"kubernetes.io/projected/2674708b-b628-41e1-b2d0-eab170a354dc-kube-api-access-8xcnr\") pod \"ceilometer-0\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.813280 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.920726 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:20 crc kubenswrapper[4941]: I1130 07:07:20.977825 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.005304 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-config-data\") pod \"6f615e12-dbe4-4037-ae70-3fe72dade25a\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.005783 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gg7z8\" (UniqueName: \"kubernetes.io/projected/6f615e12-dbe4-4037-ae70-3fe72dade25a-kube-api-access-gg7z8\") pod \"6f615e12-dbe4-4037-ae70-3fe72dade25a\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.005829 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-combined-ca-bundle\") pod \"6f615e12-dbe4-4037-ae70-3fe72dade25a\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.005889 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-scripts\") pod \"6f615e12-dbe4-4037-ae70-3fe72dade25a\" (UID: \"6f615e12-dbe4-4037-ae70-3fe72dade25a\") " Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.012832 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/6f615e12-dbe4-4037-ae70-3fe72dade25a-kube-api-access-gg7z8" (OuterVolumeSpecName: "kube-api-access-gg7z8") pod "6f615e12-dbe4-4037-ae70-3fe72dade25a" (UID: "6f615e12-dbe4-4037-ae70-3fe72dade25a"). InnerVolumeSpecName "kube-api-access-gg7z8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.013024 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-scripts" (OuterVolumeSpecName: "scripts") pod "6f615e12-dbe4-4037-ae70-3fe72dade25a" (UID: "6f615e12-dbe4-4037-ae70-3fe72dade25a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.013961 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.045001 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f615e12-dbe4-4037-ae70-3fe72dade25a" (UID: "6f615e12-dbe4-4037-ae70-3fe72dade25a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.067268 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-config-data" (OuterVolumeSpecName: "config-data") pod "6f615e12-dbe4-4037-ae70-3fe72dade25a" (UID: "6f615e12-dbe4-4037-ae70-3fe72dade25a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.079594 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5594d9b959-7tm62" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.120002 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.120034 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gg7z8\" (UniqueName: \"kubernetes.io/projected/6f615e12-dbe4-4037-ae70-3fe72dade25a-kube-api-access-gg7z8\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.120047 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.120058 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f615e12-dbe4-4037-ae70-3fe72dade25a-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.170773 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8ccb5c7cf-zxqkn"] Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.171010 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" podUID="d468fd0f-10b7-46d8-a8f8-ce095f225ff4" containerName="dnsmasq-dns" containerID="cri-o://62790034a32cbbc4e258498878ac4ba8007d6cbc4ad8bd0e46ee6b59e8cbd336" gracePeriod=10 Nov 
30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.173940 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.174004 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.364043 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.433897 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-6hd2t" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.443983 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-6hd2t" event={"ID":"6f615e12-dbe4-4037-ae70-3fe72dade25a","Type":"ContainerDied","Data":"016822a10da3106320ce671a136714f53ce53ee51f54be91bcb0c92e005d6c6e"} Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.444056 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="016822a10da3106320ce671a136714f53ce53ee51f54be91bcb0c92e005d6c6e" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.467716 4941 generic.go:334] "Generic (PLEG): container finished" podID="d468fd0f-10b7-46d8-a8f8-ce095f225ff4" containerID="62790034a32cbbc4e258498878ac4ba8007d6cbc4ad8bd0e46ee6b59e8cbd336" exitCode=0 Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.467793 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" event={"ID":"d468fd0f-10b7-46d8-a8f8-ce095f225ff4","Type":"ContainerDied","Data":"62790034a32cbbc4e258498878ac4ba8007d6cbc4ad8bd0e46ee6b59e8cbd336"} Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.472171 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2674708b-b628-41e1-b2d0-eab170a354dc","Type":"ContainerStarted","Data":"8c0cf1df1388dd474a5abb1afa7a48d1b41076a292ea53e0324684294fb3c135"} Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.577744 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e6a1813-c919-48c6-939b-3e9c74272123" path="/var/lib/kubelet/pods/4e6a1813-c919-48c6-939b-3e9c74272123/volumes" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.672054 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.672354 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6403f017-3e24-4451-85c1-52fd4eef0ed7" containerName="nova-api-log" containerID="cri-o://ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9" gracePeriod=30 Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.672799 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6403f017-3e24-4451-85c1-52fd4eef0ed7" containerName="nova-api-api" containerID="cri-o://8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72" gracePeriod=30 Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.691451 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6403f017-3e24-4451-85c1-52fd4eef0ed7" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": EOF" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.691530 4941 prober.go:107] "Probe failed" probeType="Startup" 
pod="openstack/nova-api-0" podUID="6403f017-3e24-4451-85c1-52fd4eef0ed7" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": EOF" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.691616 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.704665 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.716072 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.716481 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="612f9923-9ba7-4e64-bea5-03ad6ca3b02a" containerName="nova-metadata-log" containerID="cri-o://cd1227b504de18f063c685cecbf67d974007f8c494ea1230fae59d6072abbaa1" gracePeriod=30 Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.717208 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="612f9923-9ba7-4e64-bea5-03ad6ca3b02a" containerName="nova-metadata-metadata" containerID="cri-o://d28e869266c6eb55bad476a1e7436f7422a91d87b73d40a6b1d687db7e9fd376" gracePeriod=30 Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.789008 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.789065 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.797536 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.869951 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ts5lj\" (UniqueName: \"kubernetes.io/projected/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-kube-api-access-ts5lj\") pod \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.870445 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-ovsdbserver-nb\") pod \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.870479 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-ovsdbserver-sb\") pod \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.870570 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-dns-svc\") pod \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.870736 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-dns-swift-storage-0\") pod \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.871041 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-config\") pod \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\" (UID: \"d468fd0f-10b7-46d8-a8f8-ce095f225ff4\") " Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.876198 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-kube-api-access-ts5lj" (OuterVolumeSpecName: "kube-api-access-ts5lj") pod "d468fd0f-10b7-46d8-a8f8-ce095f225ff4" (UID: "d468fd0f-10b7-46d8-a8f8-ce095f225ff4"). InnerVolumeSpecName "kube-api-access-ts5lj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.932504 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-config" (OuterVolumeSpecName: "config") pod "d468fd0f-10b7-46d8-a8f8-ce095f225ff4" (UID: "d468fd0f-10b7-46d8-a8f8-ce095f225ff4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.955963 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d468fd0f-10b7-46d8-a8f8-ce095f225ff4" (UID: "d468fd0f-10b7-46d8-a8f8-ce095f225ff4"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.967719 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d468fd0f-10b7-46d8-a8f8-ce095f225ff4" (UID: "d468fd0f-10b7-46d8-a8f8-ce095f225ff4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.974152 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ts5lj\" (UniqueName: \"kubernetes.io/projected/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-kube-api-access-ts5lj\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.974182 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.974193 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.974203 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.978917 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d468fd0f-10b7-46d8-a8f8-ce095f225ff4" (UID: "d468fd0f-10b7-46d8-a8f8-ce095f225ff4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.983658 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d468fd0f-10b7-46d8-a8f8-ce095f225ff4" (UID: "d468fd0f-10b7-46d8-a8f8-ce095f225ff4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:07:21 crc kubenswrapper[4941]: I1130 07:07:21.995019 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.076113 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-combined-ca-bundle\") pod \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.076174 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-scripts\") pod \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.076233 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdc5q\" (UniqueName: \"kubernetes.io/projected/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-kube-api-access-cdc5q\") pod \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.076438 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-config-data\") pod \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\" (UID: \"940ccc92-8cc8-4e78-b862-a7f5fa3d9288\") " Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.076822 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.076832 4941 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d468fd0f-10b7-46d8-a8f8-ce095f225ff4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.082505 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-scripts" (OuterVolumeSpecName: "scripts") pod "940ccc92-8cc8-4e78-b862-a7f5fa3d9288" (UID: "940ccc92-8cc8-4e78-b862-a7f5fa3d9288"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.086506 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-kube-api-access-cdc5q" (OuterVolumeSpecName: "kube-api-access-cdc5q") pod "940ccc92-8cc8-4e78-b862-a7f5fa3d9288" (UID: "940ccc92-8cc8-4e78-b862-a7f5fa3d9288"). InnerVolumeSpecName "kube-api-access-cdc5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.114139 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "940ccc92-8cc8-4e78-b862-a7f5fa3d9288" (UID: "940ccc92-8cc8-4e78-b862-a7f5fa3d9288"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.132014 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-config-data" (OuterVolumeSpecName: "config-data") pod "940ccc92-8cc8-4e78-b862-a7f5fa3d9288" (UID: "940ccc92-8cc8-4e78-b862-a7f5fa3d9288"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.178232 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.178274 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.178290 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.178302 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdc5q\" (UniqueName: \"kubernetes.io/projected/940ccc92-8cc8-4e78-b862-a7f5fa3d9288-kube-api-access-cdc5q\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.501263 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 07:07:22 crc kubenswrapper[4941]: E1130 07:07:22.502117 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d468fd0f-10b7-46d8-a8f8-ce095f225ff4" containerName="init" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.502129 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d468fd0f-10b7-46d8-a8f8-ce095f225ff4" containerName="init" Nov 30 07:07:22 crc kubenswrapper[4941]: E1130 07:07:22.502143 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d468fd0f-10b7-46d8-a8f8-ce095f225ff4" containerName="dnsmasq-dns" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.502149 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d468fd0f-10b7-46d8-a8f8-ce095f225ff4" containerName="dnsmasq-dns" Nov 30 07:07:22 crc kubenswrapper[4941]: E1130 07:07:22.502173 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="940ccc92-8cc8-4e78-b862-a7f5fa3d9288" containerName="nova-cell1-conductor-db-sync" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.502180 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="940ccc92-8cc8-4e78-b862-a7f5fa3d9288" containerName="nova-cell1-conductor-db-sync" Nov 30 07:07:22 crc kubenswrapper[4941]: E1130 07:07:22.502190 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f615e12-dbe4-4037-ae70-3fe72dade25a" containerName="nova-manage" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.502196 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f615e12-dbe4-4037-ae70-3fe72dade25a" containerName="nova-manage" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.502424 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d468fd0f-10b7-46d8-a8f8-ce095f225ff4" containerName="dnsmasq-dns" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.502444 4941 
memory_manager.go:354] "RemoveStaleState removing state" podUID="940ccc92-8cc8-4e78-b862-a7f5fa3d9288" containerName="nova-cell1-conductor-db-sync" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.502454 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f615e12-dbe4-4037-ae70-3fe72dade25a" containerName="nova-manage" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.503120 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.513342 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.514877 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-jn7qf" event={"ID":"940ccc92-8cc8-4e78-b862-a7f5fa3d9288","Type":"ContainerDied","Data":"b54942982f400cab903eb1007a6f5518f8406a93b79413f9752233a095f779ff"} Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.514910 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b54942982f400cab903eb1007a6f5518f8406a93b79413f9752233a095f779ff" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.514974 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-jn7qf" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.528351 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.528361 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8ccb5c7cf-zxqkn" event={"ID":"d468fd0f-10b7-46d8-a8f8-ce095f225ff4","Type":"ContainerDied","Data":"f13e9c4f02c4f1cf39d89517a82023c510de18a1e28980af1059c239cd0a51d9"} Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.528419 4941 scope.go:117] "RemoveContainer" containerID="62790034a32cbbc4e258498878ac4ba8007d6cbc4ad8bd0e46ee6b59e8cbd336" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.533064 4941 generic.go:334] "Generic (PLEG): container finished" podID="612f9923-9ba7-4e64-bea5-03ad6ca3b02a" containerID="d28e869266c6eb55bad476a1e7436f7422a91d87b73d40a6b1d687db7e9fd376" exitCode=0 Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.533088 4941 generic.go:334] "Generic (PLEG): container finished" podID="612f9923-9ba7-4e64-bea5-03ad6ca3b02a" containerID="cd1227b504de18f063c685cecbf67d974007f8c494ea1230fae59d6072abbaa1" exitCode=143 Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.533133 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"612f9923-9ba7-4e64-bea5-03ad6ca3b02a","Type":"ContainerDied","Data":"d28e869266c6eb55bad476a1e7436f7422a91d87b73d40a6b1d687db7e9fd376"} Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.533158 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"612f9923-9ba7-4e64-bea5-03ad6ca3b02a","Type":"ContainerDied","Data":"cd1227b504de18f063c685cecbf67d974007f8c494ea1230fae59d6072abbaa1"} Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.537282 4941 generic.go:334] "Generic (PLEG): container finished" podID="6403f017-3e24-4451-85c1-52fd4eef0ed7" containerID="ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9" exitCode=143 Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.537367 4941 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6403f017-3e24-4451-85c1-52fd4eef0ed7","Type":"ContainerDied","Data":"ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9"} Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.539244 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="3adcdb0b-5b85-4649-bd98-04e946483dc5" containerName="nova-scheduler-scheduler" containerID="cri-o://b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5" gracePeriod=30 Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.539552 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2674708b-b628-41e1-b2d0-eab170a354dc","Type":"ContainerStarted","Data":"f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53"} Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.576680 4941 scope.go:117] "RemoveContainer" containerID="a6bd0dc4af0fd797134cd9cdf820d248e4d3f79d4b67f746cf051f0bcda68bb0" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.601485 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\") " pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.601572 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twwp4\" (UniqueName: \"kubernetes.io/projected/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-kube-api-access-twwp4\") pod \"nova-cell1-conductor-0\" (UID: \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\") " pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.601642 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\") " pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.638540 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.641054 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8ccb5c7cf-zxqkn"] Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.649943 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8ccb5c7cf-zxqkn"] Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.703897 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-combined-ca-bundle\") pod \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.704028 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmptw\" (UniqueName: \"kubernetes.io/projected/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-kube-api-access-zmptw\") pod \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.704051 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-config-data\") pod \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.704114 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-logs\") pod \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.704244 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-nova-metadata-tls-certs\") pod \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\" (UID: \"612f9923-9ba7-4e64-bea5-03ad6ca3b02a\") " Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.704578 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\") " pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.704611 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twwp4\" (UniqueName: \"kubernetes.io/projected/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-kube-api-access-twwp4\") pod \"nova-cell1-conductor-0\" (UID: \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\") " pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.704645 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\") " pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.705859 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-logs" (OuterVolumeSpecName: "logs") pod 
"612f9923-9ba7-4e64-bea5-03ad6ca3b02a" (UID: "612f9923-9ba7-4e64-bea5-03ad6ca3b02a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.713244 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\") " pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.717896 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\") " pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.718309 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-kube-api-access-zmptw" (OuterVolumeSpecName: "kube-api-access-zmptw") pod "612f9923-9ba7-4e64-bea5-03ad6ca3b02a" (UID: "612f9923-9ba7-4e64-bea5-03ad6ca3b02a"). InnerVolumeSpecName "kube-api-access-zmptw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.728003 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twwp4\" (UniqueName: \"kubernetes.io/projected/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-kube-api-access-twwp4\") pod \"nova-cell1-conductor-0\" (UID: \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\") " pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.740621 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "612f9923-9ba7-4e64-bea5-03ad6ca3b02a" (UID: "612f9923-9ba7-4e64-bea5-03ad6ca3b02a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.745509 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-config-data" (OuterVolumeSpecName: "config-data") pod "612f9923-9ba7-4e64-bea5-03ad6ca3b02a" (UID: "612f9923-9ba7-4e64-bea5-03ad6ca3b02a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.799574 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "612f9923-9ba7-4e64-bea5-03ad6ca3b02a" (UID: "612f9923-9ba7-4e64-bea5-03ad6ca3b02a"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.806811 4941 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.806918 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.806976 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmptw\" (UniqueName: \"kubernetes.io/projected/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-kube-api-access-zmptw\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.807156 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.807218 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612f9923-9ba7-4e64-bea5-03ad6ca3b02a-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:22 crc kubenswrapper[4941]: I1130 07:07:22.923650 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.397883 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 07:07:23 crc kubenswrapper[4941]: W1130 07:07:23.406936 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e3c777d_2ebb_447f_a8a4_7fb17e59d3ce.slice/crio-58b00ea84b7c014a2146414a3379487e3a4465eba343c42a24a4d5608f5cef53 WatchSource:0}: Error finding container 58b00ea84b7c014a2146414a3379487e3a4465eba343c42a24a4d5608f5cef53: Status 404 returned error can't find the container with id 58b00ea84b7c014a2146414a3379487e3a4465eba343c42a24a4d5608f5cef53 Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.536013 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d468fd0f-10b7-46d8-a8f8-ce095f225ff4" path="/var/lib/kubelet/pods/d468fd0f-10b7-46d8-a8f8-ce095f225ff4/volumes" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.562193 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.562567 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"612f9923-9ba7-4e64-bea5-03ad6ca3b02a","Type":"ContainerDied","Data":"cf4cb6d5cecbf21c3fdfa6f58c219216a74695a91c77496d1441847d5790578c"} Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.562638 4941 scope.go:117] "RemoveContainer" containerID="d28e869266c6eb55bad476a1e7436f7422a91d87b73d40a6b1d687db7e9fd376" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.570108 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce","Type":"ContainerStarted","Data":"58b00ea84b7c014a2146414a3379487e3a4465eba343c42a24a4d5608f5cef53"} Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.633421 4941 scope.go:117] "RemoveContainer" containerID="cd1227b504de18f063c685cecbf67d974007f8c494ea1230fae59d6072abbaa1" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.661415 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.676865 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.686389 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:23 crc kubenswrapper[4941]: E1130 07:07:23.686881 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="612f9923-9ba7-4e64-bea5-03ad6ca3b02a" containerName="nova-metadata-metadata" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.686900 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="612f9923-9ba7-4e64-bea5-03ad6ca3b02a" containerName="nova-metadata-metadata" Nov 30 07:07:23 crc kubenswrapper[4941]: E1130 07:07:23.686913 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="612f9923-9ba7-4e64-bea5-03ad6ca3b02a" containerName="nova-metadata-log" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.686920 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="612f9923-9ba7-4e64-bea5-03ad6ca3b02a" containerName="nova-metadata-log" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.687081 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="612f9923-9ba7-4e64-bea5-03ad6ca3b02a" containerName="nova-metadata-metadata" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.687110 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="612f9923-9ba7-4e64-bea5-03ad6ca3b02a" containerName="nova-metadata-log" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.688112 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.693984 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.694187 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.696268 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.750349 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-config-data\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.750410 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w97f8\" (UniqueName: \"kubernetes.io/projected/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-kube-api-access-w97f8\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.750435 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-logs\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.750458 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.750758 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.854219 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.854915 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-config-data\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.855011 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w97f8\" (UniqueName: \"kubernetes.io/projected/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-kube-api-access-w97f8\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " 
pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.855049 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-logs\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.855091 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.855571 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-logs\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.859843 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.860104 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-config-data\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.860718 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:23 crc kubenswrapper[4941]: I1130 07:07:23.871912 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w97f8\" (UniqueName: \"kubernetes.io/projected/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-kube-api-access-w97f8\") pod \"nova-metadata-0\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " pod="openstack/nova-metadata-0" Nov 30 07:07:24 crc kubenswrapper[4941]: I1130 07:07:24.017560 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:07:24 crc kubenswrapper[4941]: I1130 07:07:24.502559 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:07:24 crc kubenswrapper[4941]: I1130 07:07:24.598831 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce","Type":"ContainerStarted","Data":"54724f9e7b9cb397a6dbf2bf4b7b305271ebdeef511fa6d99df016ffce4d170d"} Nov 30 07:07:24 crc kubenswrapper[4941]: I1130 07:07:24.600559 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:24 crc kubenswrapper[4941]: I1130 07:07:24.603052 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2674708b-b628-41e1-b2d0-eab170a354dc","Type":"ContainerStarted","Data":"397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e"} Nov 30 07:07:24 crc kubenswrapper[4941]: I1130 07:07:24.603071 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2674708b-b628-41e1-b2d0-eab170a354dc","Type":"ContainerStarted","Data":"8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa"} Nov 30 07:07:24 crc kubenswrapper[4941]: I1130 07:07:24.603984 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b310ca61-cc2e-42ae-9f15-79f8c6f38b46","Type":"ContainerStarted","Data":"62b0f017272706249ee9e24691957f01c744f70be312aec01dd64ad948cf47b9"} Nov 30 07:07:24 crc kubenswrapper[4941]: I1130 07:07:24.651014 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.6509958019999997 podStartE2EDuration="2.650995802s" podCreationTimestamp="2025-11-30 07:07:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:07:24.642854464 +0000 UTC m=+1265.411026083" watchObservedRunningTime="2025-11-30 07:07:24.650995802 +0000 UTC m=+1265.419167411" Nov 30 07:07:25 crc kubenswrapper[4941]: I1130 07:07:25.533799 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="612f9923-9ba7-4e64-bea5-03ad6ca3b02a" path="/var/lib/kubelet/pods/612f9923-9ba7-4e64-bea5-03ad6ca3b02a/volumes" Nov 30 07:07:25 crc kubenswrapper[4941]: I1130 07:07:25.616878 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b310ca61-cc2e-42ae-9f15-79f8c6f38b46","Type":"ContainerStarted","Data":"18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f"} Nov 30 07:07:25 crc kubenswrapper[4941]: I1130 07:07:25.617122 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b310ca61-cc2e-42ae-9f15-79f8c6f38b46","Type":"ContainerStarted","Data":"b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d"} Nov 30 07:07:25 crc kubenswrapper[4941]: I1130 07:07:25.640569 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.640552201 podStartE2EDuration="2.640552201s" podCreationTimestamp="2025-11-30 07:07:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:07:25.63167555 +0000 UTC m=+1266.399847159" watchObservedRunningTime="2025-11-30 07:07:25.640552201 +0000 UTC 
m=+1266.408723800" Nov 30 07:07:25 crc kubenswrapper[4941]: E1130 07:07:25.982410 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 30 07:07:25 crc kubenswrapper[4941]: E1130 07:07:25.985005 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 30 07:07:25 crc kubenswrapper[4941]: E1130 07:07:25.987232 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 30 07:07:25 crc kubenswrapper[4941]: E1130 07:07:25.987369 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="3adcdb0b-5b85-4649-bd98-04e946483dc5" containerName="nova-scheduler-scheduler" Nov 30 07:07:26 crc kubenswrapper[4941]: I1130 07:07:26.655350 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2674708b-b628-41e1-b2d0-eab170a354dc","Type":"ContainerStarted","Data":"ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5"} Nov 30 07:07:26 crc kubenswrapper[4941]: I1130 07:07:26.695146 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.309477789 podStartE2EDuration="6.695124898s" podCreationTimestamp="2025-11-30 07:07:20 +0000 UTC" firstStartedPulling="2025-11-30 07:07:21.393455276 +0000 UTC m=+1262.161626885" lastFinishedPulling="2025-11-30 07:07:25.779102385 +0000 UTC m=+1266.547273994" observedRunningTime="2025-11-30 07:07:26.681093709 +0000 UTC m=+1267.449265358" watchObservedRunningTime="2025-11-30 07:07:26.695124898 +0000 UTC m=+1267.463296507" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.042936 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.227340 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nv6sm\" (UniqueName: \"kubernetes.io/projected/3adcdb0b-5b85-4649-bd98-04e946483dc5-kube-api-access-nv6sm\") pod \"3adcdb0b-5b85-4649-bd98-04e946483dc5\" (UID: \"3adcdb0b-5b85-4649-bd98-04e946483dc5\") " Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.227454 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3adcdb0b-5b85-4649-bd98-04e946483dc5-config-data\") pod \"3adcdb0b-5b85-4649-bd98-04e946483dc5\" (UID: \"3adcdb0b-5b85-4649-bd98-04e946483dc5\") " Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.227621 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3adcdb0b-5b85-4649-bd98-04e946483dc5-combined-ca-bundle\") pod \"3adcdb0b-5b85-4649-bd98-04e946483dc5\" (UID: \"3adcdb0b-5b85-4649-bd98-04e946483dc5\") " Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.235548 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3adcdb0b-5b85-4649-bd98-04e946483dc5-kube-api-access-nv6sm" (OuterVolumeSpecName: "kube-api-access-nv6sm") pod "3adcdb0b-5b85-4649-bd98-04e946483dc5" (UID: "3adcdb0b-5b85-4649-bd98-04e946483dc5"). InnerVolumeSpecName "kube-api-access-nv6sm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.270027 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3adcdb0b-5b85-4649-bd98-04e946483dc5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3adcdb0b-5b85-4649-bd98-04e946483dc5" (UID: "3adcdb0b-5b85-4649-bd98-04e946483dc5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.270524 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3adcdb0b-5b85-4649-bd98-04e946483dc5-config-data" (OuterVolumeSpecName: "config-data") pod "3adcdb0b-5b85-4649-bd98-04e946483dc5" (UID: "3adcdb0b-5b85-4649-bd98-04e946483dc5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.330711 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3adcdb0b-5b85-4649-bd98-04e946483dc5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.330758 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nv6sm\" (UniqueName: \"kubernetes.io/projected/3adcdb0b-5b85-4649-bd98-04e946483dc5-kube-api-access-nv6sm\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.330963 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3adcdb0b-5b85-4649-bd98-04e946483dc5-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.669854 4941 generic.go:334] "Generic (PLEG): container finished" podID="3adcdb0b-5b85-4649-bd98-04e946483dc5" containerID="b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5" exitCode=0 Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.671456 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.671912 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3adcdb0b-5b85-4649-bd98-04e946483dc5","Type":"ContainerDied","Data":"b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5"} Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.671949 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.671965 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3adcdb0b-5b85-4649-bd98-04e946483dc5","Type":"ContainerDied","Data":"0cc9229c8179385452483d6777dfc5374b7176ae81d8c6395738c3292d1b8767"} Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.672399 4941 scope.go:117] "RemoveContainer" containerID="b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.707643 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.724788 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.733309 4941 scope.go:117] "RemoveContainer" containerID="b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5" Nov 30 07:07:27 crc kubenswrapper[4941]: E1130 07:07:27.733876 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5\": container with ID starting with b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5 not found: ID does not exist" containerID="b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.733927 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5"} err="failed to get container status \"b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5\": rpc error: code = NotFound 
desc = could not find container \"b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5\": container with ID starting with b371f08e5a471b4472e4f826356fa54801822d442c8c17b8316ba92577e307d5 not found: ID does not exist" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.751688 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:07:27 crc kubenswrapper[4941]: E1130 07:07:27.757461 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3adcdb0b-5b85-4649-bd98-04e946483dc5" containerName="nova-scheduler-scheduler" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.757521 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3adcdb0b-5b85-4649-bd98-04e946483dc5" containerName="nova-scheduler-scheduler" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.758032 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3adcdb0b-5b85-4649-bd98-04e946483dc5" containerName="nova-scheduler-scheduler" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.758821 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.763025 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.763127 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.846355 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62484167-fde9-4dcf-9363-ae6eb0a12d3c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.846438 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62484167-fde9-4dcf-9363-ae6eb0a12d3c-config-data\") pod \"nova-scheduler-0\" (UID: \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.846475 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlgnd\" (UniqueName: \"kubernetes.io/projected/62484167-fde9-4dcf-9363-ae6eb0a12d3c-kube-api-access-hlgnd\") pod \"nova-scheduler-0\" (UID: \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.971406 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62484167-fde9-4dcf-9363-ae6eb0a12d3c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.971476 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62484167-fde9-4dcf-9363-ae6eb0a12d3c-config-data\") pod \"nova-scheduler-0\" (UID: \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.971502 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-hlgnd\" (UniqueName: \"kubernetes.io/projected/62484167-fde9-4dcf-9363-ae6eb0a12d3c-kube-api-access-hlgnd\") pod \"nova-scheduler-0\" (UID: \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.978353 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62484167-fde9-4dcf-9363-ae6eb0a12d3c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.987153 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlgnd\" (UniqueName: \"kubernetes.io/projected/62484167-fde9-4dcf-9363-ae6eb0a12d3c-kube-api-access-hlgnd\") pod \"nova-scheduler-0\" (UID: \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:27 crc kubenswrapper[4941]: I1130 07:07:27.994297 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62484167-fde9-4dcf-9363-ae6eb0a12d3c-config-data\") pod \"nova-scheduler-0\" (UID: \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\") " pod="openstack/nova-scheduler-0" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.089717 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 07:07:28 crc kubenswrapper[4941]: W1130 07:07:28.543094 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62484167_fde9_4dcf_9363_ae6eb0a12d3c.slice/crio-6ef2da646941c64fdfce0fc1bd0e5b9f598ce2f46f6fc8a3e4b9ca624e87b18e WatchSource:0}: Error finding container 6ef2da646941c64fdfce0fc1bd0e5b9f598ce2f46f6fc8a3e4b9ca624e87b18e: Status 404 returned error can't find the container with id 6ef2da646941c64fdfce0fc1bd0e5b9f598ce2f46f6fc8a3e4b9ca624e87b18e Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.545572 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.587388 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.682410 4941 generic.go:334] "Generic (PLEG): container finished" podID="6403f017-3e24-4451-85c1-52fd4eef0ed7" containerID="8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72" exitCode=0 Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.682640 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6403f017-3e24-4451-85c1-52fd4eef0ed7","Type":"ContainerDied","Data":"8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72"} Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.682746 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.682770 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6403f017-3e24-4451-85c1-52fd4eef0ed7","Type":"ContainerDied","Data":"4d982826139dba5d3606cdcbd2b8346337a93f55f6b580174449cc4fde9c9e4c"} Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.682794 4941 scope.go:117] "RemoveContainer" containerID="8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.685009 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"62484167-fde9-4dcf-9363-ae6eb0a12d3c","Type":"ContainerStarted","Data":"6ef2da646941c64fdfce0fc1bd0e5b9f598ce2f46f6fc8a3e4b9ca624e87b18e"} Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.686764 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jpw4\" (UniqueName: \"kubernetes.io/projected/6403f017-3e24-4451-85c1-52fd4eef0ed7-kube-api-access-7jpw4\") pod \"6403f017-3e24-4451-85c1-52fd4eef0ed7\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.686855 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6403f017-3e24-4451-85c1-52fd4eef0ed7-combined-ca-bundle\") pod \"6403f017-3e24-4451-85c1-52fd4eef0ed7\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.687079 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6403f017-3e24-4451-85c1-52fd4eef0ed7-config-data\") pod \"6403f017-3e24-4451-85c1-52fd4eef0ed7\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.687130 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6403f017-3e24-4451-85c1-52fd4eef0ed7-logs\") pod \"6403f017-3e24-4451-85c1-52fd4eef0ed7\" (UID: \"6403f017-3e24-4451-85c1-52fd4eef0ed7\") " Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.687900 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6403f017-3e24-4451-85c1-52fd4eef0ed7-logs" (OuterVolumeSpecName: "logs") pod "6403f017-3e24-4451-85c1-52fd4eef0ed7" (UID: "6403f017-3e24-4451-85c1-52fd4eef0ed7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.689028 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6403f017-3e24-4451-85c1-52fd4eef0ed7-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.691982 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6403f017-3e24-4451-85c1-52fd4eef0ed7-kube-api-access-7jpw4" (OuterVolumeSpecName: "kube-api-access-7jpw4") pod "6403f017-3e24-4451-85c1-52fd4eef0ed7" (UID: "6403f017-3e24-4451-85c1-52fd4eef0ed7"). InnerVolumeSpecName "kube-api-access-7jpw4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.706038 4941 scope.go:117] "RemoveContainer" containerID="ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.725971 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6403f017-3e24-4451-85c1-52fd4eef0ed7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6403f017-3e24-4451-85c1-52fd4eef0ed7" (UID: "6403f017-3e24-4451-85c1-52fd4eef0ed7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.729918 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6403f017-3e24-4451-85c1-52fd4eef0ed7-config-data" (OuterVolumeSpecName: "config-data") pod "6403f017-3e24-4451-85c1-52fd4eef0ed7" (UID: "6403f017-3e24-4451-85c1-52fd4eef0ed7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.733709 4941 scope.go:117] "RemoveContainer" containerID="8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72" Nov 30 07:07:28 crc kubenswrapper[4941]: E1130 07:07:28.734219 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72\": container with ID starting with 8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72 not found: ID does not exist" containerID="8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.734257 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72"} err="failed to get container status \"8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72\": rpc error: code = NotFound desc = could not find container \"8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72\": container with ID starting with 8b44123096127a79ba24097a80e5370544e3b8bc10dc831457fb0c16e25cfd72 not found: ID does not exist" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.734284 4941 scope.go:117] "RemoveContainer" containerID="ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9" Nov 30 07:07:28 crc kubenswrapper[4941]: E1130 07:07:28.734758 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9\": container with ID starting with ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9 not found: ID does not exist" containerID="ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.734795 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9"} err="failed to get container status \"ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9\": rpc error: code = NotFound desc = could not find container \"ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9\": container with ID starting with ae665a2380609ddb8d3d27081b49aa1c8f3c4fcc490bae42ecb4d98eacbf67b9 
not found: ID does not exist" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.793450 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6403f017-3e24-4451-85c1-52fd4eef0ed7-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.793495 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jpw4\" (UniqueName: \"kubernetes.io/projected/6403f017-3e24-4451-85c1-52fd4eef0ed7-kube-api-access-7jpw4\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:28 crc kubenswrapper[4941]: I1130 07:07:28.793506 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6403f017-3e24-4451-85c1-52fd4eef0ed7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.019072 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.019160 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.042987 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.052088 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.077168 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:29 crc kubenswrapper[4941]: E1130 07:07:29.077747 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6403f017-3e24-4451-85c1-52fd4eef0ed7" containerName="nova-api-log" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.077770 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="6403f017-3e24-4451-85c1-52fd4eef0ed7" containerName="nova-api-log" Nov 30 07:07:29 crc kubenswrapper[4941]: E1130 07:07:29.077782 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6403f017-3e24-4451-85c1-52fd4eef0ed7" containerName="nova-api-api" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.077790 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="6403f017-3e24-4451-85c1-52fd4eef0ed7" containerName="nova-api-api" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.078009 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="6403f017-3e24-4451-85c1-52fd4eef0ed7" containerName="nova-api-log" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.078041 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="6403f017-3e24-4451-85c1-52fd4eef0ed7" containerName="nova-api-api" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.079216 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.082460 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.098799 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-config-data\") pod \"nova-api-0\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.098850 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.098921 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fqgf\" (UniqueName: \"kubernetes.io/projected/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-kube-api-access-2fqgf\") pod \"nova-api-0\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.098999 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-logs\") pod \"nova-api-0\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.107912 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.201099 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-logs\") pod \"nova-api-0\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.201222 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-config-data\") pod \"nova-api-0\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.201267 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.201380 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fqgf\" (UniqueName: \"kubernetes.io/projected/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-kube-api-access-2fqgf\") pod \"nova-api-0\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.201648 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-logs\") pod \"nova-api-0\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " 
pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.206142 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.214558 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-config-data\") pod \"nova-api-0\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.221491 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fqgf\" (UniqueName: \"kubernetes.io/projected/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-kube-api-access-2fqgf\") pod \"nova-api-0\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.396313 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.594116 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3adcdb0b-5b85-4649-bd98-04e946483dc5" path="/var/lib/kubelet/pods/3adcdb0b-5b85-4649-bd98-04e946483dc5/volumes" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.595475 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6403f017-3e24-4451-85c1-52fd4eef0ed7" path="/var/lib/kubelet/pods/6403f017-3e24-4451-85c1-52fd4eef0ed7/volumes" Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.697857 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"62484167-fde9-4dcf-9363-ae6eb0a12d3c","Type":"ContainerStarted","Data":"7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f"} Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.721947 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.721927241 podStartE2EDuration="2.721927241s" podCreationTimestamp="2025-11-30 07:07:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:07:29.719032422 +0000 UTC m=+1270.487204041" watchObservedRunningTime="2025-11-30 07:07:29.721927241 +0000 UTC m=+1270.490098850" Nov 30 07:07:29 crc kubenswrapper[4941]: W1130 07:07:29.882708 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a7e830a_370a_47e5_a1f8_08025b7c7bfd.slice/crio-2038f1df53f731b9c65e574dc232805a771cf6552eb07f223c84287a856ad7bb WatchSource:0}: Error finding container 2038f1df53f731b9c65e574dc232805a771cf6552eb07f223c84287a856ad7bb: Status 404 returned error can't find the container with id 2038f1df53f731b9c65e574dc232805a771cf6552eb07f223c84287a856ad7bb Nov 30 07:07:29 crc kubenswrapper[4941]: I1130 07:07:29.885708 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:30 crc kubenswrapper[4941]: I1130 07:07:30.711004 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"7a7e830a-370a-47e5-a1f8-08025b7c7bfd","Type":"ContainerStarted","Data":"1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805"} Nov 30 07:07:30 crc kubenswrapper[4941]: I1130 07:07:30.711592 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7a7e830a-370a-47e5-a1f8-08025b7c7bfd","Type":"ContainerStarted","Data":"fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e"} Nov 30 07:07:30 crc kubenswrapper[4941]: I1130 07:07:30.711608 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7a7e830a-370a-47e5-a1f8-08025b7c7bfd","Type":"ContainerStarted","Data":"2038f1df53f731b9c65e574dc232805a771cf6552eb07f223c84287a856ad7bb"} Nov 30 07:07:30 crc kubenswrapper[4941]: I1130 07:07:30.737670 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.737649111 podStartE2EDuration="1.737649111s" podCreationTimestamp="2025-11-30 07:07:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:07:30.732864904 +0000 UTC m=+1271.501036553" watchObservedRunningTime="2025-11-30 07:07:30.737649111 +0000 UTC m=+1271.505820720" Nov 30 07:07:32 crc kubenswrapper[4941]: I1130 07:07:32.979118 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:07:32 crc kubenswrapper[4941]: I1130 07:07:32.979241 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 30 07:07:32 crc kubenswrapper[4941]: I1130 07:07:32.979617 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:07:32 crc kubenswrapper[4941]: I1130 07:07:32.979688 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 07:07:32 crc kubenswrapper[4941]: I1130 07:07:32.980918 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"03f6ba41bf367eb3ad6a0ca9a42efb4ebf757994a2964d83030fd8e83c2c7d32"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 07:07:32 crc kubenswrapper[4941]: I1130 07:07:32.980990 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://03f6ba41bf367eb3ad6a0ca9a42efb4ebf757994a2964d83030fd8e83c2c7d32" gracePeriod=600 Nov 30 07:07:33 crc kubenswrapper[4941]: I1130 07:07:33.109765 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 30 07:07:33 crc kubenswrapper[4941]: I1130 07:07:33.741205 4941 generic.go:334] "Generic (PLEG): container finished" 
podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="03f6ba41bf367eb3ad6a0ca9a42efb4ebf757994a2964d83030fd8e83c2c7d32" exitCode=0 Nov 30 07:07:33 crc kubenswrapper[4941]: I1130 07:07:33.741254 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"03f6ba41bf367eb3ad6a0ca9a42efb4ebf757994a2964d83030fd8e83c2c7d32"} Nov 30 07:07:33 crc kubenswrapper[4941]: I1130 07:07:33.741846 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"9fa56f4fe3d0a299069614d024c15c66b787f4d645343ae4c789d83f64a98208"} Nov 30 07:07:33 crc kubenswrapper[4941]: I1130 07:07:33.741878 4941 scope.go:117] "RemoveContainer" containerID="c56f9f6e36d888e40d67920be6ce9775fda0a6740b4e0f2b802e64b1e8fb285b" Nov 30 07:07:34 crc kubenswrapper[4941]: I1130 07:07:34.019068 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 30 07:07:34 crc kubenswrapper[4941]: I1130 07:07:34.019108 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 30 07:07:35 crc kubenswrapper[4941]: I1130 07:07:35.033453 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.186:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 07:07:35 crc kubenswrapper[4941]: I1130 07:07:35.033448 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.186:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 30 07:07:38 crc kubenswrapper[4941]: I1130 07:07:38.090559 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 30 07:07:38 crc kubenswrapper[4941]: I1130 07:07:38.123653 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 30 07:07:38 crc kubenswrapper[4941]: I1130 07:07:38.839069 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 30 07:07:39 crc kubenswrapper[4941]: I1130 07:07:39.397441 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 30 07:07:39 crc kubenswrapper[4941]: I1130 07:07:39.397538 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 30 07:07:40 crc kubenswrapper[4941]: I1130 07:07:40.480654 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 07:07:40 crc kubenswrapper[4941]: I1130 07:07:40.480722 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" containerName="nova-api-log" probeResult="failure" output="Get 
\"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 07:07:44 crc kubenswrapper[4941]: I1130 07:07:44.029582 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 30 07:07:44 crc kubenswrapper[4941]: I1130 07:07:44.030357 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 30 07:07:44 crc kubenswrapper[4941]: I1130 07:07:44.039467 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 30 07:07:44 crc kubenswrapper[4941]: I1130 07:07:44.040515 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.764772 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.874478 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fth5\" (UniqueName: \"kubernetes.io/projected/5ab7bd0a-4dad-414f-b2da-03b57b471309-kube-api-access-2fth5\") pod \"5ab7bd0a-4dad-414f-b2da-03b57b471309\" (UID: \"5ab7bd0a-4dad-414f-b2da-03b57b471309\") " Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.874526 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ab7bd0a-4dad-414f-b2da-03b57b471309-config-data\") pod \"5ab7bd0a-4dad-414f-b2da-03b57b471309\" (UID: \"5ab7bd0a-4dad-414f-b2da-03b57b471309\") " Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.874634 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ab7bd0a-4dad-414f-b2da-03b57b471309-combined-ca-bundle\") pod \"5ab7bd0a-4dad-414f-b2da-03b57b471309\" (UID: \"5ab7bd0a-4dad-414f-b2da-03b57b471309\") " Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.882870 4941 generic.go:334] "Generic (PLEG): container finished" podID="5ab7bd0a-4dad-414f-b2da-03b57b471309" containerID="df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c" exitCode=137 Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.882923 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5ab7bd0a-4dad-414f-b2da-03b57b471309","Type":"ContainerDied","Data":"df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c"} Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.882956 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5ab7bd0a-4dad-414f-b2da-03b57b471309","Type":"ContainerDied","Data":"1d51be00db5d274d0cbb067058f3ff8f9d655c227b9d284110de41b192681229"} Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.882977 4941 scope.go:117] "RemoveContainer" containerID="df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c" Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.883121 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.887520 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ab7bd0a-4dad-414f-b2da-03b57b471309-kube-api-access-2fth5" (OuterVolumeSpecName: "kube-api-access-2fth5") pod "5ab7bd0a-4dad-414f-b2da-03b57b471309" (UID: "5ab7bd0a-4dad-414f-b2da-03b57b471309"). InnerVolumeSpecName "kube-api-access-2fth5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.902504 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ab7bd0a-4dad-414f-b2da-03b57b471309-config-data" (OuterVolumeSpecName: "config-data") pod "5ab7bd0a-4dad-414f-b2da-03b57b471309" (UID: "5ab7bd0a-4dad-414f-b2da-03b57b471309"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.911053 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ab7bd0a-4dad-414f-b2da-03b57b471309-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ab7bd0a-4dad-414f-b2da-03b57b471309" (UID: "5ab7bd0a-4dad-414f-b2da-03b57b471309"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.977056 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ab7bd0a-4dad-414f-b2da-03b57b471309-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.977083 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fth5\" (UniqueName: \"kubernetes.io/projected/5ab7bd0a-4dad-414f-b2da-03b57b471309-kube-api-access-2fth5\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:46 crc kubenswrapper[4941]: I1130 07:07:46.977095 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ab7bd0a-4dad-414f-b2da-03b57b471309-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.016469 4941 scope.go:117] "RemoveContainer" containerID="df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c" Nov 30 07:07:47 crc kubenswrapper[4941]: E1130 07:07:47.017529 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c\": container with ID starting with df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c not found: ID does not exist" containerID="df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.017581 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c"} err="failed to get container status \"df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c\": rpc error: code = NotFound desc = could not find container \"df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c\": container with ID starting with df0823f4faeb4c2520c6f3eff2b592fcc5d6f09aa479c5c4620308abe740025c not found: ID does not exist" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.216805 4941 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.237043 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.256691 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 07:07:47 crc kubenswrapper[4941]: E1130 07:07:47.257242 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ab7bd0a-4dad-414f-b2da-03b57b471309" containerName="nova-cell1-novncproxy-novncproxy" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.257263 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ab7bd0a-4dad-414f-b2da-03b57b471309" containerName="nova-cell1-novncproxy-novncproxy" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.257545 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ab7bd0a-4dad-414f-b2da-03b57b471309" containerName="nova-cell1-novncproxy-novncproxy" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.258377 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.260747 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.260775 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.263717 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.283197 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.283866 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.283921 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9zgr\" (UniqueName: \"kubernetes.io/projected/f311bacd-2cef-44fe-95c4-38a7462cd4a6-kube-api-access-t9zgr\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.283972 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.283992 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 
07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.284026 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.386374 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.386434 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9zgr\" (UniqueName: \"kubernetes.io/projected/f311bacd-2cef-44fe-95c4-38a7462cd4a6-kube-api-access-t9zgr\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.386487 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.386505 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.386535 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.391708 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.391843 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.391907 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.399422 4941 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.404045 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9zgr\" (UniqueName: \"kubernetes.io/projected/f311bacd-2cef-44fe-95c4-38a7462cd4a6-kube-api-access-t9zgr\") pod \"nova-cell1-novncproxy-0\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.539493 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ab7bd0a-4dad-414f-b2da-03b57b471309" path="/var/lib/kubelet/pods/5ab7bd0a-4dad-414f-b2da-03b57b471309/volumes" Nov 30 07:07:47 crc kubenswrapper[4941]: I1130 07:07:47.588620 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:07:48 crc kubenswrapper[4941]: I1130 07:07:48.030595 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 07:07:48 crc kubenswrapper[4941]: I1130 07:07:48.922196 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f311bacd-2cef-44fe-95c4-38a7462cd4a6","Type":"ContainerStarted","Data":"199b3e26fc0f6545467dc0bfc386bbb742110c36e2dd4105695b124948c0d840"} Nov 30 07:07:48 crc kubenswrapper[4941]: I1130 07:07:48.922546 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f311bacd-2cef-44fe-95c4-38a7462cd4a6","Type":"ContainerStarted","Data":"bfe1241d7ec6909462dd3e55c931e8058b61f0aba5c9ecbb83a16810195dbf61"} Nov 30 07:07:48 crc kubenswrapper[4941]: I1130 07:07:48.949797 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.949758264 podStartE2EDuration="1.949758264s" podCreationTimestamp="2025-11-30 07:07:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:07:48.947041871 +0000 UTC m=+1289.715213530" watchObservedRunningTime="2025-11-30 07:07:48.949758264 +0000 UTC m=+1289.717929903" Nov 30 07:07:49 crc kubenswrapper[4941]: I1130 07:07:49.401778 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 30 07:07:49 crc kubenswrapper[4941]: I1130 07:07:49.402906 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 30 07:07:49 crc kubenswrapper[4941]: I1130 07:07:49.404720 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 30 07:07:49 crc kubenswrapper[4941]: I1130 07:07:49.406283 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 30 07:07:49 crc kubenswrapper[4941]: I1130 07:07:49.934144 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 30 07:07:49 crc kubenswrapper[4941]: I1130 07:07:49.938939 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.182928 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"] 
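The pod_startup_latency_tracker entries in this section are internally consistent: podStartE2EDuration equals watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that E2E figure minus the image-pull window (firstStartedPulling to lastFinishedPulling). When both pull timestamps are the zero time, as for nova-cell1-novncproxy-0 above (cached image, 1.949758264s), SLO and E2E coincide. A minimal Go sketch checking the arithmetic against the ceilometer-0 entry, where the pull window is non-zero; the relationship is inferred from these log values, not quoted from kubelet source:

package main

import (
	"fmt"
	"time"
)

func main() {
	const layout = "2006-01-02 15:04:05 -0700 MST" // fractional seconds are accepted on parse
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	// Values copied verbatim from the ceilometer-0 startup-latency entry above.
	created := parse("2025-11-30 07:07:20 +0000 UTC")
	watch := parse("2025-11-30 07:07:26.695124898 +0000 UTC")
	pullStart := parse("2025-11-30 07:07:21.393455276 +0000 UTC")
	pullEnd := parse("2025-11-30 07:07:25.779102385 +0000 UTC")

	e2e := watch.Sub(created)           // 6.695124898s == logged podStartE2EDuration
	slo := e2e - pullEnd.Sub(pullStart) // 2.309477789s == logged podStartSLOduration
	fmt.Println(e2e.Seconds(), slo.Seconds())
}

The same subtraction reproduces the logged SLO durations for nova-scheduler-0 (2.721927241s), nova-api-0 (1.737649111s), and nova-cell1-novncproxy-0 exactly.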
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.190586 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.225666 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"] Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.252420 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-ovsdbserver-sb\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.252464 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-dns-swift-storage-0\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.252587 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-config\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.252621 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-ovsdbserver-nb\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.252669 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-dns-svc\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.252687 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gpc2\" (UniqueName: \"kubernetes.io/projected/674f83a3-0419-43d7-a679-fed1bf09b047-kube-api-access-8gpc2\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.354486 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-config\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.354549 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-ovsdbserver-nb\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" 
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.354602 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-dns-svc\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.354625 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gpc2\" (UniqueName: \"kubernetes.io/projected/674f83a3-0419-43d7-a679-fed1bf09b047-kube-api-access-8gpc2\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.354683 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-dns-swift-storage-0\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.354706 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-ovsdbserver-sb\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.355606 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-dns-svc\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.355984 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-dns-swift-storage-0\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.356165 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-config\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.356423 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-ovsdbserver-sb\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.356718 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-ovsdbserver-nb\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.376371 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gpc2\" (UniqueName: \"kubernetes.io/projected/674f83a3-0419-43d7-a679-fed1bf09b047-kube-api-access-8gpc2\") pod \"dnsmasq-dns-5d8fc4ccc9-9n2hf\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.533138 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"
Nov 30 07:07:50 crc kubenswrapper[4941]: I1130 07:07:50.827015 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Nov 30 07:07:51 crc kubenswrapper[4941]: I1130 07:07:51.023234 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"]
Nov 30 07:07:51 crc kubenswrapper[4941]: I1130 07:07:51.950839 4941 generic.go:334] "Generic (PLEG): container finished" podID="674f83a3-0419-43d7-a679-fed1bf09b047" containerID="572b5e690bd61ad1cd6041023c0766c57db67af247693faeb9a84f745057643c" exitCode=0
Nov 30 07:07:51 crc kubenswrapper[4941]: I1130 07:07:51.950882 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" event={"ID":"674f83a3-0419-43d7-a679-fed1bf09b047","Type":"ContainerDied","Data":"572b5e690bd61ad1cd6041023c0766c57db67af247693faeb9a84f745057643c"}
Nov 30 07:07:51 crc kubenswrapper[4941]: I1130 07:07:51.951214 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" event={"ID":"674f83a3-0419-43d7-a679-fed1bf09b047","Type":"ContainerStarted","Data":"d68b79741fbd75bc412bce49028cda937cb87623d90255ecb5e476e92709d910"}
Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.174001 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.177075 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="proxy-httpd" containerID="cri-o://ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5" gracePeriod=30
Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.177067 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="ceilometer-central-agent" containerID="cri-o://f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53" gracePeriod=30
Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.177246 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="ceilometer-notification-agent" containerID="cri-o://8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa" gracePeriod=30
Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.177283 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="sg-core" containerID="cri-o://397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e" gracePeriod=30
Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.589652 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.977254 4941 generic.go:334] "Generic (PLEG): container finished" podID="2674708b-b628-41e1-b2d0-eab170a354dc" containerID="ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5" exitCode=0
podID="2674708b-b628-41e1-b2d0-eab170a354dc" containerID="ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5" exitCode=0 Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.977281 4941 generic.go:334] "Generic (PLEG): container finished" podID="2674708b-b628-41e1-b2d0-eab170a354dc" containerID="397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e" exitCode=2 Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.977289 4941 generic.go:334] "Generic (PLEG): container finished" podID="2674708b-b628-41e1-b2d0-eab170a354dc" containerID="f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53" exitCode=0 Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.977337 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2674708b-b628-41e1-b2d0-eab170a354dc","Type":"ContainerDied","Data":"ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5"} Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.977362 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2674708b-b628-41e1-b2d0-eab170a354dc","Type":"ContainerDied","Data":"397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e"} Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.977372 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2674708b-b628-41e1-b2d0-eab170a354dc","Type":"ContainerDied","Data":"f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53"} Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.979628 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" event={"ID":"674f83a3-0419-43d7-a679-fed1bf09b047","Type":"ContainerStarted","Data":"1de6943804783b4a852eb14e98f7041622c4c2594236d04ba6c18ed97142b408"} Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.979826 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.984891 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.985661 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" containerName="nova-api-log" containerID="cri-o://fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e" gracePeriod=30 Nov 30 07:07:52 crc kubenswrapper[4941]: I1130 07:07:52.985741 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" containerName="nova-api-api" containerID="cri-o://1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805" gracePeriod=30 Nov 30 07:07:53 crc kubenswrapper[4941]: I1130 07:07:53.003723 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" podStartSLOduration=3.003704746 podStartE2EDuration="3.003704746s" podCreationTimestamp="2025-11-30 07:07:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:07:52.999701864 +0000 UTC m=+1293.767873473" watchObservedRunningTime="2025-11-30 07:07:53.003704746 +0000 UTC m=+1293.771876355" Nov 30 07:07:53 crc kubenswrapper[4941]: I1130 07:07:53.989800 4941 generic.go:334] "Generic (PLEG): container finished" 
podID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" containerID="fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e" exitCode=143 Nov 30 07:07:53 crc kubenswrapper[4941]: I1130 07:07:53.989875 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7a7e830a-370a-47e5-a1f8-08025b7c7bfd","Type":"ContainerDied","Data":"fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e"} Nov 30 07:07:54 crc kubenswrapper[4941]: I1130 07:07:54.856581 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:07:54 crc kubenswrapper[4941]: I1130 07:07:54.958361 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-sg-core-conf-yaml\") pod \"2674708b-b628-41e1-b2d0-eab170a354dc\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " Nov 30 07:07:54 crc kubenswrapper[4941]: I1130 07:07:54.958663 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-config-data\") pod \"2674708b-b628-41e1-b2d0-eab170a354dc\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " Nov 30 07:07:54 crc kubenswrapper[4941]: I1130 07:07:54.958802 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xcnr\" (UniqueName: \"kubernetes.io/projected/2674708b-b628-41e1-b2d0-eab170a354dc-kube-api-access-8xcnr\") pod \"2674708b-b628-41e1-b2d0-eab170a354dc\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " Nov 30 07:07:54 crc kubenswrapper[4941]: I1130 07:07:54.958931 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2674708b-b628-41e1-b2d0-eab170a354dc-run-httpd\") pod \"2674708b-b628-41e1-b2d0-eab170a354dc\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " Nov 30 07:07:54 crc kubenswrapper[4941]: I1130 07:07:54.959011 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2674708b-b628-41e1-b2d0-eab170a354dc-log-httpd\") pod \"2674708b-b628-41e1-b2d0-eab170a354dc\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " Nov 30 07:07:54 crc kubenswrapper[4941]: I1130 07:07:54.959133 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-scripts\") pod \"2674708b-b628-41e1-b2d0-eab170a354dc\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " Nov 30 07:07:54 crc kubenswrapper[4941]: I1130 07:07:54.959249 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-combined-ca-bundle\") pod \"2674708b-b628-41e1-b2d0-eab170a354dc\" (UID: \"2674708b-b628-41e1-b2d0-eab170a354dc\") " Nov 30 07:07:54 crc kubenswrapper[4941]: I1130 07:07:54.960664 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2674708b-b628-41e1-b2d0-eab170a354dc-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2674708b-b628-41e1-b2d0-eab170a354dc" (UID: "2674708b-b628-41e1-b2d0-eab170a354dc"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:07:54 crc kubenswrapper[4941]: I1130 07:07:54.960694 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2674708b-b628-41e1-b2d0-eab170a354dc-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2674708b-b628-41e1-b2d0-eab170a354dc" (UID: "2674708b-b628-41e1-b2d0-eab170a354dc"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:07:54 crc kubenswrapper[4941]: I1130 07:07:54.993235 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-scripts" (OuterVolumeSpecName: "scripts") pod "2674708b-b628-41e1-b2d0-eab170a354dc" (UID: "2674708b-b628-41e1-b2d0-eab170a354dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:54 crc kubenswrapper[4941]: I1130 07:07:54.995038 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2674708b-b628-41e1-b2d0-eab170a354dc-kube-api-access-8xcnr" (OuterVolumeSpecName: "kube-api-access-8xcnr") pod "2674708b-b628-41e1-b2d0-eab170a354dc" (UID: "2674708b-b628-41e1-b2d0-eab170a354dc"). InnerVolumeSpecName "kube-api-access-8xcnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.001594 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2674708b-b628-41e1-b2d0-eab170a354dc" (UID: "2674708b-b628-41e1-b2d0-eab170a354dc"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.012917 4941 generic.go:334] "Generic (PLEG): container finished" podID="2674708b-b628-41e1-b2d0-eab170a354dc" containerID="8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa" exitCode=0 Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.012974 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2674708b-b628-41e1-b2d0-eab170a354dc","Type":"ContainerDied","Data":"8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa"} Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.013003 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.013018 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2674708b-b628-41e1-b2d0-eab170a354dc","Type":"ContainerDied","Data":"8c0cf1df1388dd474a5abb1afa7a48d1b41076a292ea53e0324684294fb3c135"} Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.013042 4941 scope.go:117] "RemoveContainer" containerID="ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.060749 4941 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.060775 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xcnr\" (UniqueName: \"kubernetes.io/projected/2674708b-b628-41e1-b2d0-eab170a354dc-kube-api-access-8xcnr\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.060787 4941 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2674708b-b628-41e1-b2d0-eab170a354dc-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.060798 4941 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2674708b-b628-41e1-b2d0-eab170a354dc-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.060807 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.060892 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2674708b-b628-41e1-b2d0-eab170a354dc" (UID: "2674708b-b628-41e1-b2d0-eab170a354dc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.072144 4941 scope.go:117] "RemoveContainer" containerID="397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.094572 4941 scope.go:117] "RemoveContainer" containerID="8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.102428 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-config-data" (OuterVolumeSpecName: "config-data") pod "2674708b-b628-41e1-b2d0-eab170a354dc" (UID: "2674708b-b628-41e1-b2d0-eab170a354dc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.119550 4941 scope.go:117] "RemoveContainer" containerID="f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.141146 4941 scope.go:117] "RemoveContainer" containerID="ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5" Nov 30 07:07:55 crc kubenswrapper[4941]: E1130 07:07:55.141567 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5\": container with ID starting with ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5 not found: ID does not exist" containerID="ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.141598 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5"} err="failed to get container status \"ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5\": rpc error: code = NotFound desc = could not find container \"ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5\": container with ID starting with ac2457f830afee919853c5cbd446b23ade6b8af3fa050f24f08f3e2052e191e5 not found: ID does not exist" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.141619 4941 scope.go:117] "RemoveContainer" containerID="397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e" Nov 30 07:07:55 crc kubenswrapper[4941]: E1130 07:07:55.142020 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e\": container with ID starting with 397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e not found: ID does not exist" containerID="397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.142053 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e"} err="failed to get container status \"397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e\": rpc error: code = NotFound desc = could not find container \"397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e\": container with ID starting with 397ccdfe00a3e9f48e21f9b640ebaf66ae87e9d5c1e2476c8526bb035fe0440e not found: ID does not exist" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.142069 4941 scope.go:117] "RemoveContainer" containerID="8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa" Nov 30 07:07:55 crc kubenswrapper[4941]: E1130 07:07:55.142318 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa\": container with ID starting with 8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa not found: ID does not exist" containerID="8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.142353 4941 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa"} err="failed to get container status \"8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa\": rpc error: code = NotFound desc = could not find container \"8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa\": container with ID starting with 8f8156d2d89d7d1dfeeb16a5ea4ce40ac748af7eeefd49d6b62b01eeb67992aa not found: ID does not exist" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.142372 4941 scope.go:117] "RemoveContainer" containerID="f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53" Nov 30 07:07:55 crc kubenswrapper[4941]: E1130 07:07:55.142758 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53\": container with ID starting with f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53 not found: ID does not exist" containerID="f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.142781 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53"} err="failed to get container status \"f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53\": rpc error: code = NotFound desc = could not find container \"f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53\": container with ID starting with f7233fc10c5e314a35401845074cf0a3e174dbc2256554839474c4ff7aa1cb53 not found: ID does not exist" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.161364 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.161392 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2674708b-b628-41e1-b2d0-eab170a354dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.353862 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.365595 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.378160 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:07:55 crc kubenswrapper[4941]: E1130 07:07:55.378665 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="sg-core" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.378684 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="sg-core" Nov 30 07:07:55 crc kubenswrapper[4941]: E1130 07:07:55.378702 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="proxy-httpd" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.378708 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="proxy-httpd" Nov 30 07:07:55 crc kubenswrapper[4941]: E1130 07:07:55.378720 4941 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="ceilometer-central-agent" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.378726 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="ceilometer-central-agent" Nov 30 07:07:55 crc kubenswrapper[4941]: E1130 07:07:55.378738 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="ceilometer-notification-agent" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.378743 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="ceilometer-notification-agent" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.378932 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="sg-core" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.378945 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="proxy-httpd" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.378956 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="ceilometer-central-agent" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.378966 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" containerName="ceilometer-notification-agent" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.381050 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.383406 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.383665 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.387997 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.467561 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqm5s\" (UniqueName: \"kubernetes.io/projected/00f93a85-30b5-4574-a77a-89ab16e623f9-kube-api-access-jqm5s\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.467988 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.468131 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-scripts\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.468220 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/00f93a85-30b5-4574-a77a-89ab16e623f9-log-httpd\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.468344 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00f93a85-30b5-4574-a77a-89ab16e623f9-run-httpd\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.468458 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-config-data\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.468566 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.534123 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2674708b-b628-41e1-b2d0-eab170a354dc" path="/var/lib/kubelet/pods/2674708b-b628-41e1-b2d0-eab170a354dc/volumes" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.570053 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-config-data\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.570311 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.570528 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqm5s\" (UniqueName: \"kubernetes.io/projected/00f93a85-30b5-4574-a77a-89ab16e623f9-kube-api-access-jqm5s\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.570746 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.570877 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-scripts\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.570971 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/00f93a85-30b5-4574-a77a-89ab16e623f9-log-httpd\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.571138 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00f93a85-30b5-4574-a77a-89ab16e623f9-run-httpd\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.571695 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00f93a85-30b5-4574-a77a-89ab16e623f9-log-httpd\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.571831 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00f93a85-30b5-4574-a77a-89ab16e623f9-run-httpd\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.575497 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.575908 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.577095 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-config-data\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.577515 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-scripts\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.590914 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqm5s\" (UniqueName: \"kubernetes.io/projected/00f93a85-30b5-4574-a77a-89ab16e623f9-kube-api-access-jqm5s\") pod \"ceilometer-0\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.726173 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.842414 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 30 07:07:55 crc kubenswrapper[4941]: I1130 07:07:55.842891 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="ab69a64e-2e8a-4ee3-992a-50726935c90c" containerName="kube-state-metrics" containerID="cri-o://2058ab41ac78e9c36944b4e0398f24a4d71f927eba605bf782b4bf814c0342e4" gracePeriod=30 Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.034135 4941 generic.go:334] "Generic (PLEG): container finished" podID="ab69a64e-2e8a-4ee3-992a-50726935c90c" containerID="2058ab41ac78e9c36944b4e0398f24a4d71f927eba605bf782b4bf814c0342e4" exitCode=2 Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.034177 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ab69a64e-2e8a-4ee3-992a-50726935c90c","Type":"ContainerDied","Data":"2058ab41ac78e9c36944b4e0398f24a4d71f927eba605bf782b4bf814c0342e4"} Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.215489 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:07:56 crc kubenswrapper[4941]: W1130 07:07:56.252516 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00f93a85_30b5_4574_a77a_89ab16e623f9.slice/crio-acc462f7a43e22a90e2cdb32a890ed8b2d5cdaf25a2bb94c02909ca7d189197c WatchSource:0}: Error finding container acc462f7a43e22a90e2cdb32a890ed8b2d5cdaf25a2bb94c02909ca7d189197c: Status 404 returned error can't find the container with id acc462f7a43e22a90e2cdb32a890ed8b2d5cdaf25a2bb94c02909ca7d189197c Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.274117 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.283895 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w2kc\" (UniqueName: \"kubernetes.io/projected/ab69a64e-2e8a-4ee3-992a-50726935c90c-kube-api-access-4w2kc\") pod \"ab69a64e-2e8a-4ee3-992a-50726935c90c\" (UID: \"ab69a64e-2e8a-4ee3-992a-50726935c90c\") " Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.295431 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab69a64e-2e8a-4ee3-992a-50726935c90c-kube-api-access-4w2kc" (OuterVolumeSpecName: "kube-api-access-4w2kc") pod "ab69a64e-2e8a-4ee3-992a-50726935c90c" (UID: "ab69a64e-2e8a-4ee3-992a-50726935c90c"). InnerVolumeSpecName "kube-api-access-4w2kc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.385649 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w2kc\" (UniqueName: \"kubernetes.io/projected/ab69a64e-2e8a-4ee3-992a-50726935c90c-kube-api-access-4w2kc\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.654037 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.802060 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fqgf\" (UniqueName: \"kubernetes.io/projected/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-kube-api-access-2fqgf\") pod \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.802577 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-combined-ca-bundle\") pod \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.802641 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-logs\") pod \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.802730 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-config-data\") pod \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\" (UID: \"7a7e830a-370a-47e5-a1f8-08025b7c7bfd\") " Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.803199 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-logs" (OuterVolumeSpecName: "logs") pod "7a7e830a-370a-47e5-a1f8-08025b7c7bfd" (UID: "7a7e830a-370a-47e5-a1f8-08025b7c7bfd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.807825 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-kube-api-access-2fqgf" (OuterVolumeSpecName: "kube-api-access-2fqgf") pod "7a7e830a-370a-47e5-a1f8-08025b7c7bfd" (UID: "7a7e830a-370a-47e5-a1f8-08025b7c7bfd"). InnerVolumeSpecName "kube-api-access-2fqgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.832565 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-config-data" (OuterVolumeSpecName: "config-data") pod "7a7e830a-370a-47e5-a1f8-08025b7c7bfd" (UID: "7a7e830a-370a-47e5-a1f8-08025b7c7bfd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.855531 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a7e830a-370a-47e5-a1f8-08025b7c7bfd" (UID: "7a7e830a-370a-47e5-a1f8-08025b7c7bfd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.904649 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.904677 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.904693 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fqgf\" (UniqueName: \"kubernetes.io/projected/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-kube-api-access-2fqgf\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:56 crc kubenswrapper[4941]: I1130 07:07:56.904705 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a7e830a-370a-47e5-a1f8-08025b7c7bfd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.043277 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00f93a85-30b5-4574-a77a-89ab16e623f9","Type":"ContainerStarted","Data":"acc462f7a43e22a90e2cdb32a890ed8b2d5cdaf25a2bb94c02909ca7d189197c"} Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.048076 4941 generic.go:334] "Generic (PLEG): container finished" podID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" containerID="1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805" exitCode=0 Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.048261 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.048421 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7a7e830a-370a-47e5-a1f8-08025b7c7bfd","Type":"ContainerDied","Data":"1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805"} Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.048475 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7a7e830a-370a-47e5-a1f8-08025b7c7bfd","Type":"ContainerDied","Data":"2038f1df53f731b9c65e574dc232805a771cf6552eb07f223c84287a856ad7bb"} Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.048498 4941 scope.go:117] "RemoveContainer" containerID="1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.052263 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ab69a64e-2e8a-4ee3-992a-50726935c90c","Type":"ContainerDied","Data":"680401b99a5edecf9745ebc590dc84410517c18ef14238907216b9e91dc06b56"} Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.052362 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.074454 4941 scope.go:117] "RemoveContainer" containerID="fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.089044 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.097760 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.115077 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 30 07:07:57 crc kubenswrapper[4941]: E1130 07:07:57.115555 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" containerName="nova-api-api" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.115569 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" containerName="nova-api-api" Nov 30 07:07:57 crc kubenswrapper[4941]: E1130 07:07:57.115582 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" containerName="nova-api-log" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.115589 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" containerName="nova-api-log" Nov 30 07:07:57 crc kubenswrapper[4941]: E1130 07:07:57.115603 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab69a64e-2e8a-4ee3-992a-50726935c90c" containerName="kube-state-metrics" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.115609 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab69a64e-2e8a-4ee3-992a-50726935c90c" containerName="kube-state-metrics" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.115779 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" containerName="nova-api-log" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.115793 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" containerName="nova-api-api" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.115806 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab69a64e-2e8a-4ee3-992a-50726935c90c" containerName="kube-state-metrics" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.116821 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.128963 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.129192 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.129340 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.179996 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.180160 4941 scope.go:117] "RemoveContainer" containerID="1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805" Nov 30 07:07:57 crc kubenswrapper[4941]: E1130 07:07:57.180646 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805\": container with ID starting with 1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805 not found: ID does not exist" containerID="1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.180742 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805"} err="failed to get container status \"1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805\": rpc error: code = NotFound desc = could not find container \"1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805\": container with ID starting with 1c8960dfc5ece461106c10ff96b857d1e47969bc12b209fc957bcdd97cad4805 not found: ID does not exist" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.180772 4941 scope.go:117] "RemoveContainer" containerID="fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e" Nov 30 07:07:57 crc kubenswrapper[4941]: E1130 07:07:57.181148 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e\": container with ID starting with fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e not found: ID does not exist" containerID="fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.181162 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e"} err="failed to get container status \"fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e\": rpc error: code = NotFound desc = could not find container \"fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e\": container with ID starting with fa1d5f1ed2b8a6b30ae6181aea495fe9c2a5795c9caddabc6b484af0f543310e not found: ID does not exist" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.181177 4941 scope.go:117] "RemoveContainer" containerID="2058ab41ac78e9c36944b4e0398f24a4d71f927eba605bf782b4bf814c0342e4" Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.190618 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 30 07:07:57 crc kubenswrapper[4941]: 
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.205565 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.213898 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.215198 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.217562 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.217734 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.233154 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.312506 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-public-tls-certs\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.312576 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dbd9b93f-7419-4280-9793-b771c4171a7f-logs\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.312908 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg6fn\" (UniqueName: \"kubernetes.io/projected/dbd9b93f-7419-4280-9793-b771c4171a7f-kube-api-access-pg6fn\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.313263 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.313342 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-config-data\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.313563 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.415383 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lt72\" (UniqueName: \"kubernetes.io/projected/a65953b8-4285-412b-9670-7747951a62ae-kube-api-access-2lt72\") pod \"kube-state-metrics-0\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.415502 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.415557 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg6fn\" (UniqueName: \"kubernetes.io/projected/dbd9b93f-7419-4280-9793-b771c4171a7f-kube-api-access-pg6fn\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.415625 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.415657 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-config-data\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.415681 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.415707 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.415758 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-public-tls-certs\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.415794 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.415839 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dbd9b93f-7419-4280-9793-b771c4171a7f-logs\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.416389 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dbd9b93f-7419-4280-9793-b771c4171a7f-logs\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.422046 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.422276 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-public-tls-certs\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.422992 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-config-data\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.428123 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.430974 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg6fn\" (UniqueName: \"kubernetes.io/projected/dbd9b93f-7419-4280-9793-b771c4171a7f-kube-api-access-pg6fn\") pod \"nova-api-0\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.466837 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.517291 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.517393 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.517442 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.517494 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lt72\" (UniqueName: \"kubernetes.io/projected/a65953b8-4285-412b-9670-7747951a62ae-kube-api-access-2lt72\") pod \"kube-state-metrics-0\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.521620 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.521738 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.528950 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.531973 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lt72\" (UniqueName: \"kubernetes.io/projected/a65953b8-4285-412b-9670-7747951a62ae-kube-api-access-2lt72\") pod \"kube-state-metrics-0\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.535134 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.558009 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a7e830a-370a-47e5-a1f8-08025b7c7bfd" path="/var/lib/kubelet/pods/7a7e830a-370a-47e5-a1f8-08025b7c7bfd/volumes"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.558589 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab69a64e-2e8a-4ee3-992a-50726935c90c" path="/var/lib/kubelet/pods/ab69a64e-2e8a-4ee3-992a-50726935c90c/volumes"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.589200 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.632867 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.640163 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0"
Nov 30 07:07:57 crc kubenswrapper[4941]: I1130 07:07:57.978943 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 30 07:07:57 crc kubenswrapper[4941]: W1130 07:07:57.988075 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddbd9b93f_7419_4280_9793_b771c4171a7f.slice/crio-9147e7e649babe4bb8ffd247b93ee8aafc570d3be094af378dc4e475bf23ce3e WatchSource:0}: Error finding container 9147e7e649babe4bb8ffd247b93ee8aafc570d3be094af378dc4e475bf23ce3e: Status 404 returned error can't find the container with id 9147e7e649babe4bb8ffd247b93ee8aafc570d3be094af378dc4e475bf23ce3e
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.069364 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dbd9b93f-7419-4280-9793-b771c4171a7f","Type":"ContainerStarted","Data":"9147e7e649babe4bb8ffd247b93ee8aafc570d3be094af378dc4e475bf23ce3e"}
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.085844 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.094637 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00f93a85-30b5-4574-a77a-89ab16e623f9","Type":"ContainerStarted","Data":"1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef"}
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.094672 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00f93a85-30b5-4574-a77a-89ab16e623f9","Type":"ContainerStarted","Data":"2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98"}
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.115929 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.309177 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-hww95"]
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.310757 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.320487 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.320572 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.342499 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-hww95"]
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.434507 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-hww95\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.434797 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-config-data\") pod \"nova-cell1-cell-mapping-hww95\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.435089 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-scripts\") pod \"nova-cell1-cell-mapping-hww95\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.435144 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msrht\" (UniqueName: \"kubernetes.io/projected/02e23704-d56c-4d7b-81f9-729b38a5c39e-kube-api-access-msrht\") pod \"nova-cell1-cell-mapping-hww95\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.536478 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-scripts\") pod \"nova-cell1-cell-mapping-hww95\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.536524 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msrht\" (UniqueName: \"kubernetes.io/projected/02e23704-d56c-4d7b-81f9-729b38a5c39e-kube-api-access-msrht\") pod \"nova-cell1-cell-mapping-hww95\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.536585 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-hww95\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.536647 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-config-data\") pod \"nova-cell1-cell-mapping-hww95\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.540750 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-hww95\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.542242 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-config-data\") pod \"nova-cell1-cell-mapping-hww95\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.545624 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-scripts\") pod \"nova-cell1-cell-mapping-hww95\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.559236 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msrht\" (UniqueName: \"kubernetes.io/projected/02e23704-d56c-4d7b-81f9-729b38a5c39e-kube-api-access-msrht\") pod \"nova-cell1-cell-mapping-hww95\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:58 crc kubenswrapper[4941]: I1130 07:07:58.661136 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-hww95"
Nov 30 07:07:59 crc kubenswrapper[4941]: I1130 07:07:59.102667 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a65953b8-4285-412b-9670-7747951a62ae","Type":"ContainerStarted","Data":"8e135084f5429afb87f87c92443142f7bdd20c8c7f175058d25d225c984bf164"}
Nov 30 07:07:59 crc kubenswrapper[4941]: I1130 07:07:59.104688 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Nov 30 07:07:59 crc kubenswrapper[4941]: I1130 07:07:59.104826 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a65953b8-4285-412b-9670-7747951a62ae","Type":"ContainerStarted","Data":"521abc31c0f8b06fae40f6723b74a8072511b6b173dfcfbfc7b2a2f7ed5c112b"}
Nov 30 07:07:59 crc kubenswrapper[4941]: I1130 07:07:59.105302 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dbd9b93f-7419-4280-9793-b771c4171a7f","Type":"ContainerStarted","Data":"f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d"}
Nov 30 07:07:59 crc kubenswrapper[4941]: I1130 07:07:59.105360 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dbd9b93f-7419-4280-9793-b771c4171a7f","Type":"ContainerStarted","Data":"587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a"}
Nov 30 07:07:59 crc kubenswrapper[4941]: I1130 07:07:59.109571 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00f93a85-30b5-4574-a77a-89ab16e623f9","Type":"ContainerStarted","Data":"58c5b44ab7d47dc47bfd8652f577e4629509bf7e3312c5380fe20ba0789ac322"}
Nov 30 07:07:59 crc kubenswrapper[4941]: W1130 07:07:59.123942 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02e23704_d56c_4d7b_81f9_729b38a5c39e.slice/crio-50b7646b024ecae5dc355e7bfea6c36ce49aa7f5d490b17c38a11797413ffe48 WatchSource:0}: Error finding container 50b7646b024ecae5dc355e7bfea6c36ce49aa7f5d490b17c38a11797413ffe48: Status 404 returned error can't find the container with id 50b7646b024ecae5dc355e7bfea6c36ce49aa7f5d490b17c38a11797413ffe48
Nov 30 07:07:59 crc kubenswrapper[4941]: I1130 07:07:59.128256 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-hww95"]
Nov 30 07:07:59 crc kubenswrapper[4941]: I1130 07:07:59.139432 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.724796593 podStartE2EDuration="2.139378422s" podCreationTimestamp="2025-11-30 07:07:57 +0000 UTC" firstStartedPulling="2025-11-30 07:07:58.107219671 +0000 UTC m=+1298.875391280" lastFinishedPulling="2025-11-30 07:07:58.52180149 +0000 UTC m=+1299.289973109" observedRunningTime="2025-11-30 07:07:59.126142717 +0000 UTC m=+1299.894314326" watchObservedRunningTime="2025-11-30 07:07:59.139378422 +0000 UTC m=+1299.907550031"
Nov 30 07:07:59 crc kubenswrapper[4941]: I1130 07:07:59.158014 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.157994901 podStartE2EDuration="2.157994901s" podCreationTimestamp="2025-11-30 07:07:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:07:59.149933095 +0000 UTC m=+1299.918104704" watchObservedRunningTime="2025-11-30 07:07:59.157994901 +0000 UTC m=+1299.926166510"
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.123993 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00f93a85-30b5-4574-a77a-89ab16e623f9","Type":"ContainerStarted","Data":"08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d"}
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.125091 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="ceilometer-central-agent" containerID="cri-o://2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98" gracePeriod=30
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.125650 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.125624 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="sg-core" containerID="cri-o://58c5b44ab7d47dc47bfd8652f577e4629509bf7e3312c5380fe20ba0789ac322" gracePeriod=30
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.125739 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="proxy-httpd" containerID="cri-o://08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d" gracePeriod=30
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.126111 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="ceilometer-notification-agent" containerID="cri-o://1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef" gracePeriod=30
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.134933 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-hww95" event={"ID":"02e23704-d56c-4d7b-81f9-729b38a5c39e","Type":"ContainerStarted","Data":"b105b8d565a21c51a0e3819153e09f1faf3ad8232f7dd86d2a96a1c95531e6bf"}
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.134980 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-hww95" event={"ID":"02e23704-d56c-4d7b-81f9-729b38a5c39e","Type":"ContainerStarted","Data":"50b7646b024ecae5dc355e7bfea6c36ce49aa7f5d490b17c38a11797413ffe48"}
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.153397 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.828136854 podStartE2EDuration="5.153377808s" podCreationTimestamp="2025-11-30 07:07:55 +0000 UTC" firstStartedPulling="2025-11-30 07:07:56.255493235 +0000 UTC m=+1297.023664844" lastFinishedPulling="2025-11-30 07:07:59.580734189 +0000 UTC m=+1300.348905798" observedRunningTime="2025-11-30 07:08:00.147643612 +0000 UTC m=+1300.915815221" watchObservedRunningTime="2025-11-30 07:08:00.153377808 +0000 UTC m=+1300.921549417"
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.167612 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-hww95" podStartSLOduration=2.167599032 podStartE2EDuration="2.167599032s" podCreationTimestamp="2025-11-30 07:07:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:08:00.165970593 +0000 UTC m=+1300.934142202" watchObservedRunningTime="2025-11-30 07:08:00.167599032 +0000 UTC m=+1300.935770641"
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.534496 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.607580 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5594d9b959-7tm62"]
Nov 30 07:08:00 crc kubenswrapper[4941]: I1130 07:08:00.608109 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5594d9b959-7tm62" podUID="8e818a15-1640-4100-b312-18c88fff65b0" containerName="dnsmasq-dns" containerID="cri-o://23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b" gracePeriod=10
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.127629 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5594d9b959-7tm62"
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.148833 4941 generic.go:334] "Generic (PLEG): container finished" podID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerID="08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d" exitCode=0
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.148878 4941 generic.go:334] "Generic (PLEG): container finished" podID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerID="58c5b44ab7d47dc47bfd8652f577e4629509bf7e3312c5380fe20ba0789ac322" exitCode=2
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.148870 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00f93a85-30b5-4574-a77a-89ab16e623f9","Type":"ContainerDied","Data":"08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d"}
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.148925 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00f93a85-30b5-4574-a77a-89ab16e623f9","Type":"ContainerDied","Data":"58c5b44ab7d47dc47bfd8652f577e4629509bf7e3312c5380fe20ba0789ac322"}
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.148936 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00f93a85-30b5-4574-a77a-89ab16e623f9","Type":"ContainerDied","Data":"1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef"}
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.148890 4941 generic.go:334] "Generic (PLEG): container finished" podID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerID="1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef" exitCode=0
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.152119 4941 generic.go:334] "Generic (PLEG): container finished" podID="8e818a15-1640-4100-b312-18c88fff65b0" containerID="23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b" exitCode=0
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.153116 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5594d9b959-7tm62"
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.153286 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5594d9b959-7tm62" event={"ID":"8e818a15-1640-4100-b312-18c88fff65b0","Type":"ContainerDied","Data":"23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b"}
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.153313 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5594d9b959-7tm62" event={"ID":"8e818a15-1640-4100-b312-18c88fff65b0","Type":"ContainerDied","Data":"489f7c34b5660e80045220ab667e006f0757dea92c93ec674650c009fd0b9f13"}
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.153348 4941 scope.go:117] "RemoveContainer" containerID="23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b"
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.180601 4941 scope.go:117] "RemoveContainer" containerID="b2933435264d25c63a5a8c4670c962c223ab7283cd51671d9043eb973329c816"
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.208225 4941 scope.go:117] "RemoveContainer" containerID="23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b"
Nov 30 07:08:01 crc kubenswrapper[4941]: E1130 07:08:01.208737 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b\": container with ID starting with 23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b not found: ID does not exist" containerID="23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b"
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.208764 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b"} err="failed to get container status \"23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b\": rpc error: code = NotFound desc = could not find container \"23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b\": container with ID starting with 23e9e0633492fcdbffea7c24aca1487034d48683de473cc4fc6a2ad7e1a0f51b not found: ID does not exist"
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.208781 4941 scope.go:117] "RemoveContainer" containerID="b2933435264d25c63a5a8c4670c962c223ab7283cd51671d9043eb973329c816"
Nov 30 07:08:01 crc kubenswrapper[4941]: E1130 07:08:01.209173 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2933435264d25c63a5a8c4670c962c223ab7283cd51671d9043eb973329c816\": container with ID starting with b2933435264d25c63a5a8c4670c962c223ab7283cd51671d9043eb973329c816 not found: ID does not exist" containerID="b2933435264d25c63a5a8c4670c962c223ab7283cd51671d9043eb973329c816"
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.209406 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2933435264d25c63a5a8c4670c962c223ab7283cd51671d9043eb973329c816"} err="failed to get container status \"b2933435264d25c63a5a8c4670c962c223ab7283cd51671d9043eb973329c816\": rpc error: code = NotFound desc = could not find container \"b2933435264d25c63a5a8c4670c962c223ab7283cd51671d9043eb973329c816\": container with ID starting with b2933435264d25c63a5a8c4670c962c223ab7283cd51671d9043eb973329c816 not found: ID does not exist"
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.215742 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-ovsdbserver-nb\") pod \"8e818a15-1640-4100-b312-18c88fff65b0\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") "
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.215853 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-config\") pod \"8e818a15-1640-4100-b312-18c88fff65b0\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") "
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.215897 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-dns-svc\") pod \"8e818a15-1640-4100-b312-18c88fff65b0\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") "
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.215944 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-dns-swift-storage-0\") pod \"8e818a15-1640-4100-b312-18c88fff65b0\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") "
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.216066 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bxjb\" (UniqueName: \"kubernetes.io/projected/8e818a15-1640-4100-b312-18c88fff65b0-kube-api-access-5bxjb\") pod \"8e818a15-1640-4100-b312-18c88fff65b0\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") "
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.216105 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-ovsdbserver-sb\") pod \"8e818a15-1640-4100-b312-18c88fff65b0\" (UID: \"8e818a15-1640-4100-b312-18c88fff65b0\") "
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.251693 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e818a15-1640-4100-b312-18c88fff65b0-kube-api-access-5bxjb" (OuterVolumeSpecName: "kube-api-access-5bxjb") pod "8e818a15-1640-4100-b312-18c88fff65b0" (UID: "8e818a15-1640-4100-b312-18c88fff65b0"). InnerVolumeSpecName "kube-api-access-5bxjb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.319151 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bxjb\" (UniqueName: \"kubernetes.io/projected/8e818a15-1640-4100-b312-18c88fff65b0-kube-api-access-5bxjb\") on node \"crc\" DevicePath \"\""
Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.337163 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8e818a15-1640-4100-b312-18c88fff65b0" (UID: "8e818a15-1640-4100-b312-18c88fff65b0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.359069 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8e818a15-1640-4100-b312-18c88fff65b0" (UID: "8e818a15-1640-4100-b312-18c88fff65b0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.371719 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8e818a15-1640-4100-b312-18c88fff65b0" (UID: "8e818a15-1640-4100-b312-18c88fff65b0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.371812 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8e818a15-1640-4100-b312-18c88fff65b0" (UID: "8e818a15-1640-4100-b312-18c88fff65b0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.402653 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-config" (OuterVolumeSpecName: "config") pod "8e818a15-1640-4100-b312-18c88fff65b0" (UID: "8e818a15-1640-4100-b312-18c88fff65b0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.421417 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.421454 4941 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.421468 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.421476 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.421484 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e818a15-1640-4100-b312-18c88fff65b0-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.483177 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5594d9b959-7tm62"] Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.490501 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5594d9b959-7tm62"] Nov 30 07:08:01 crc kubenswrapper[4941]: I1130 07:08:01.530836 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="8e818a15-1640-4100-b312-18c88fff65b0" path="/var/lib/kubelet/pods/8e818a15-1640-4100-b312-18c88fff65b0/volumes" Nov 30 07:08:03 crc kubenswrapper[4941]: I1130 07:08:03.873866 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:08:03 crc kubenswrapper[4941]: I1130 07:08:03.969466 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-combined-ca-bundle\") pod \"00f93a85-30b5-4574-a77a-89ab16e623f9\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " Nov 30 07:08:03 crc kubenswrapper[4941]: I1130 07:08:03.969629 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-sg-core-conf-yaml\") pod \"00f93a85-30b5-4574-a77a-89ab16e623f9\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " Nov 30 07:08:03 crc kubenswrapper[4941]: I1130 07:08:03.969680 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-config-data\") pod \"00f93a85-30b5-4574-a77a-89ab16e623f9\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " Nov 30 07:08:03 crc kubenswrapper[4941]: I1130 07:08:03.969708 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqm5s\" (UniqueName: \"kubernetes.io/projected/00f93a85-30b5-4574-a77a-89ab16e623f9-kube-api-access-jqm5s\") pod \"00f93a85-30b5-4574-a77a-89ab16e623f9\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " Nov 30 07:08:03 crc kubenswrapper[4941]: I1130 07:08:03.969739 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00f93a85-30b5-4574-a77a-89ab16e623f9-log-httpd\") pod \"00f93a85-30b5-4574-a77a-89ab16e623f9\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " Nov 30 07:08:03 crc kubenswrapper[4941]: I1130 07:08:03.969795 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-scripts\") pod \"00f93a85-30b5-4574-a77a-89ab16e623f9\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " Nov 30 07:08:03 crc kubenswrapper[4941]: I1130 07:08:03.969921 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00f93a85-30b5-4574-a77a-89ab16e623f9-run-httpd\") pod \"00f93a85-30b5-4574-a77a-89ab16e623f9\" (UID: \"00f93a85-30b5-4574-a77a-89ab16e623f9\") " Nov 30 07:08:03 crc kubenswrapper[4941]: I1130 07:08:03.970932 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00f93a85-30b5-4574-a77a-89ab16e623f9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "00f93a85-30b5-4574-a77a-89ab16e623f9" (UID: "00f93a85-30b5-4574-a77a-89ab16e623f9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:08:03 crc kubenswrapper[4941]: I1130 07:08:03.971697 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00f93a85-30b5-4574-a77a-89ab16e623f9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "00f93a85-30b5-4574-a77a-89ab16e623f9" (UID: "00f93a85-30b5-4574-a77a-89ab16e623f9"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:08:03 crc kubenswrapper[4941]: I1130 07:08:03.975971 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-scripts" (OuterVolumeSpecName: "scripts") pod "00f93a85-30b5-4574-a77a-89ab16e623f9" (UID: "00f93a85-30b5-4574-a77a-89ab16e623f9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:03 crc kubenswrapper[4941]: I1130 07:08:03.989455 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00f93a85-30b5-4574-a77a-89ab16e623f9-kube-api-access-jqm5s" (OuterVolumeSpecName: "kube-api-access-jqm5s") pod "00f93a85-30b5-4574-a77a-89ab16e623f9" (UID: "00f93a85-30b5-4574-a77a-89ab16e623f9"). InnerVolumeSpecName "kube-api-access-jqm5s". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.000470 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "00f93a85-30b5-4574-a77a-89ab16e623f9" (UID: "00f93a85-30b5-4574-a77a-89ab16e623f9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.045796 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "00f93a85-30b5-4574-a77a-89ab16e623f9" (UID: "00f93a85-30b5-4574-a77a-89ab16e623f9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.069828 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-config-data" (OuterVolumeSpecName: "config-data") pod "00f93a85-30b5-4574-a77a-89ab16e623f9" (UID: "00f93a85-30b5-4574-a77a-89ab16e623f9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.072081 4941 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00f93a85-30b5-4574-a77a-89ab16e623f9-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.072236 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.072320 4941 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.072408 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.072465 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqm5s\" (UniqueName: \"kubernetes.io/projected/00f93a85-30b5-4574-a77a-89ab16e623f9-kube-api-access-jqm5s\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.072520 4941 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00f93a85-30b5-4574-a77a-89ab16e623f9-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.072578 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00f93a85-30b5-4574-a77a-89ab16e623f9-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.182241 4941 generic.go:334] "Generic (PLEG): container finished" podID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerID="2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98" exitCode=0 Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.182303 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00f93a85-30b5-4574-a77a-89ab16e623f9","Type":"ContainerDied","Data":"2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98"} Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.182345 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00f93a85-30b5-4574-a77a-89ab16e623f9","Type":"ContainerDied","Data":"acc462f7a43e22a90e2cdb32a890ed8b2d5cdaf25a2bb94c02909ca7d189197c"} Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.182364 4941 scope.go:117] "RemoveContainer" containerID="08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.182492 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.191874 4941 generic.go:334] "Generic (PLEG): container finished" podID="02e23704-d56c-4d7b-81f9-729b38a5c39e" containerID="b105b8d565a21c51a0e3819153e09f1faf3ad8232f7dd86d2a96a1c95531e6bf" exitCode=0 Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.191916 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-hww95" event={"ID":"02e23704-d56c-4d7b-81f9-729b38a5c39e","Type":"ContainerDied","Data":"b105b8d565a21c51a0e3819153e09f1faf3ad8232f7dd86d2a96a1c95531e6bf"} Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.223880 4941 scope.go:117] "RemoveContainer" containerID="58c5b44ab7d47dc47bfd8652f577e4629509bf7e3312c5380fe20ba0789ac322" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.240921 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.252837 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.261037 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:08:04 crc kubenswrapper[4941]: E1130 07:08:04.261484 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e818a15-1640-4100-b312-18c88fff65b0" containerName="dnsmasq-dns" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.261502 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e818a15-1640-4100-b312-18c88fff65b0" containerName="dnsmasq-dns" Nov 30 07:08:04 crc kubenswrapper[4941]: E1130 07:08:04.261518 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="proxy-httpd" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.261525 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="proxy-httpd" Nov 30 07:08:04 crc kubenswrapper[4941]: E1130 07:08:04.261540 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="ceilometer-central-agent" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.261547 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="ceilometer-central-agent" Nov 30 07:08:04 crc kubenswrapper[4941]: E1130 07:08:04.261556 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e818a15-1640-4100-b312-18c88fff65b0" containerName="init" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.261562 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e818a15-1640-4100-b312-18c88fff65b0" containerName="init" Nov 30 07:08:04 crc kubenswrapper[4941]: E1130 07:08:04.261583 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="ceilometer-notification-agent" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.261588 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="ceilometer-notification-agent" Nov 30 07:08:04 crc kubenswrapper[4941]: E1130 07:08:04.261600 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="sg-core" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.261605 4941 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="sg-core" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.261777 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="proxy-httpd" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.261789 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="ceilometer-notification-agent" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.261798 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e818a15-1640-4100-b312-18c88fff65b0" containerName="dnsmasq-dns" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.261809 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="ceilometer-central-agent" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.261825 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" containerName="sg-core" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.263544 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.266896 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.267096 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.269969 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.272065 4941 scope.go:117] "RemoveContainer" containerID="1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.274760 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.304025 4941 scope.go:117] "RemoveContainer" containerID="2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.327027 4941 scope.go:117] "RemoveContainer" containerID="08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d" Nov 30 07:08:04 crc kubenswrapper[4941]: E1130 07:08:04.327567 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d\": container with ID starting with 08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d not found: ID does not exist" containerID="08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.327609 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d"} err="failed to get container status \"08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d\": rpc error: code = NotFound desc = could not find container \"08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d\": container with ID starting with 08f351dbc4ad423380e0d06911ce426fbef2b8518271f6598d5a75757db8bc7d not found: ID does not exist" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 
Nov 30 07:08:04 crc kubenswrapper[4941]: E1130 07:08:04.328093 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58c5b44ab7d47dc47bfd8652f577e4629509bf7e3312c5380fe20ba0789ac322\": container with ID starting with 58c5b44ab7d47dc47bfd8652f577e4629509bf7e3312c5380fe20ba0789ac322 not found: ID does not exist" containerID="58c5b44ab7d47dc47bfd8652f577e4629509bf7e3312c5380fe20ba0789ac322"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.328119 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58c5b44ab7d47dc47bfd8652f577e4629509bf7e3312c5380fe20ba0789ac322"} err="failed to get container status \"58c5b44ab7d47dc47bfd8652f577e4629509bf7e3312c5380fe20ba0789ac322\": rpc error: code = NotFound desc = could not find container \"58c5b44ab7d47dc47bfd8652f577e4629509bf7e3312c5380fe20ba0789ac322\": container with ID starting with 58c5b44ab7d47dc47bfd8652f577e4629509bf7e3312c5380fe20ba0789ac322 not found: ID does not exist"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.328133 4941 scope.go:117] "RemoveContainer" containerID="1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef"
Nov 30 07:08:04 crc kubenswrapper[4941]: E1130 07:08:04.328397 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef\": container with ID starting with 1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef not found: ID does not exist" containerID="1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.328417 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef"} err="failed to get container status \"1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef\": rpc error: code = NotFound desc = could not find container \"1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef\": container with ID starting with 1c13457f7311f87aebbecfaa43973da297ab272e7452eee88c38e0083202fbef not found: ID does not exist"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.328434 4941 scope.go:117] "RemoveContainer" containerID="2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98"
Nov 30 07:08:04 crc kubenswrapper[4941]: E1130 07:08:04.328663 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98\": container with ID starting with 2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98 not found: ID does not exist" containerID="2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.328687 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98"} err="failed to get container status \"2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98\": rpc error: code = NotFound desc = could not find container \"2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98\": container with ID starting with 2700dead76e4c4e0c0d16f86c6cb2934b2b7e7b9b6f683f0166d380c77a40a98 not found: ID does not exist"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.378870 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.378920 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-config-data\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.378950 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-scripts\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.378969 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.378992 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvshn\" (UniqueName: \"kubernetes.io/projected/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-kube-api-access-vvshn\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.379282 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-log-httpd\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.379462 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.379572 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-run-httpd\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.482062 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.482142 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-run-httpd\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.482275 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.482316 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-config-data\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.482398 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-scripts\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.482434 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.482475 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvshn\" (UniqueName: \"kubernetes.io/projected/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-kube-api-access-vvshn\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.482612 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-run-httpd\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.483541 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-log-httpd\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.486419 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-log-httpd\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.488238 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-config-data\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0"
Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.489150 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for
volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.489583 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.489840 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-scripts\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.490876 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.512789 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvshn\" (UniqueName: \"kubernetes.io/projected/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-kube-api-access-vvshn\") pod \"ceilometer-0\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " pod="openstack/ceilometer-0" Nov 30 07:08:04 crc kubenswrapper[4941]: I1130 07:08:04.594785 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.073251 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:08:05 crc kubenswrapper[4941]: W1130 07:08:05.075009 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22508ba9_7cfd_462a_9b94_3ee1d8c0a15b.slice/crio-d4a8fe8ec71334257724b982b05a4d03e0a109e845c1904eef0241236daa8001 WatchSource:0}: Error finding container d4a8fe8ec71334257724b982b05a4d03e0a109e845c1904eef0241236daa8001: Status 404 returned error can't find the container with id d4a8fe8ec71334257724b982b05a4d03e0a109e845c1904eef0241236daa8001 Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.077844 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.206581 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b","Type":"ContainerStarted","Data":"d4a8fe8ec71334257724b982b05a4d03e0a109e845c1904eef0241236daa8001"} Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.532290 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00f93a85-30b5-4574-a77a-89ab16e623f9" path="/var/lib/kubelet/pods/00f93a85-30b5-4574-a77a-89ab16e623f9/volumes" Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.667063 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-hww95" Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.847437 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-combined-ca-bundle\") pod \"02e23704-d56c-4d7b-81f9-729b38a5c39e\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.848125 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-scripts\") pod \"02e23704-d56c-4d7b-81f9-729b38a5c39e\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.848300 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-config-data\") pod \"02e23704-d56c-4d7b-81f9-729b38a5c39e\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.848683 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msrht\" (UniqueName: \"kubernetes.io/projected/02e23704-d56c-4d7b-81f9-729b38a5c39e-kube-api-access-msrht\") pod \"02e23704-d56c-4d7b-81f9-729b38a5c39e\" (UID: \"02e23704-d56c-4d7b-81f9-729b38a5c39e\") " Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.861576 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-scripts" (OuterVolumeSpecName: "scripts") pod "02e23704-d56c-4d7b-81f9-729b38a5c39e" (UID: "02e23704-d56c-4d7b-81f9-729b38a5c39e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.872438 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02e23704-d56c-4d7b-81f9-729b38a5c39e-kube-api-access-msrht" (OuterVolumeSpecName: "kube-api-access-msrht") pod "02e23704-d56c-4d7b-81f9-729b38a5c39e" (UID: "02e23704-d56c-4d7b-81f9-729b38a5c39e"). InnerVolumeSpecName "kube-api-access-msrht". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.884412 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-config-data" (OuterVolumeSpecName: "config-data") pod "02e23704-d56c-4d7b-81f9-729b38a5c39e" (UID: "02e23704-d56c-4d7b-81f9-729b38a5c39e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.901803 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02e23704-d56c-4d7b-81f9-729b38a5c39e" (UID: "02e23704-d56c-4d7b-81f9-729b38a5c39e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.951991 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.952026 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msrht\" (UniqueName: \"kubernetes.io/projected/02e23704-d56c-4d7b-81f9-729b38a5c39e-kube-api-access-msrht\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.952039 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:05 crc kubenswrapper[4941]: I1130 07:08:05.952050 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02e23704-d56c-4d7b-81f9-729b38a5c39e-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.079366 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5594d9b959-7tm62" podUID="8e818a15-1640-4100-b312-18c88fff65b0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.180:5353: i/o timeout" Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.216791 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-hww95" event={"ID":"02e23704-d56c-4d7b-81f9-729b38a5c39e","Type":"ContainerDied","Data":"50b7646b024ecae5dc355e7bfea6c36ce49aa7f5d490b17c38a11797413ffe48"} Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.216840 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50b7646b024ecae5dc355e7bfea6c36ce49aa7f5d490b17c38a11797413ffe48" Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.216864 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-hww95" Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.219126 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b","Type":"ContainerStarted","Data":"258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d"} Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.394198 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.394473 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="dbd9b93f-7419-4280-9793-b771c4171a7f" containerName="nova-api-log" containerID="cri-o://587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a" gracePeriod=30 Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.394622 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="dbd9b93f-7419-4280-9793-b771c4171a7f" containerName="nova-api-api" containerID="cri-o://f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d" gracePeriod=30 Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.427519 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.427818 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="62484167-fde9-4dcf-9363-ae6eb0a12d3c" containerName="nova-scheduler-scheduler" containerID="cri-o://7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f" gracePeriod=30 Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.440938 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.441239 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerName="nova-metadata-log" containerID="cri-o://b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d" gracePeriod=30 Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.441525 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerName="nova-metadata-metadata" containerID="cri-o://18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f" gracePeriod=30 Nov 30 07:08:06 crc kubenswrapper[4941]: I1130 07:08:06.938218 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.076150 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-config-data\") pod \"dbd9b93f-7419-4280-9793-b771c4171a7f\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.076216 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-internal-tls-certs\") pod \"dbd9b93f-7419-4280-9793-b771c4171a7f\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.076375 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dbd9b93f-7419-4280-9793-b771c4171a7f-logs\") pod \"dbd9b93f-7419-4280-9793-b771c4171a7f\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.076430 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-combined-ca-bundle\") pod \"dbd9b93f-7419-4280-9793-b771c4171a7f\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.076511 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-public-tls-certs\") pod \"dbd9b93f-7419-4280-9793-b771c4171a7f\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.076583 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pg6fn\" (UniqueName: \"kubernetes.io/projected/dbd9b93f-7419-4280-9793-b771c4171a7f-kube-api-access-pg6fn\") pod \"dbd9b93f-7419-4280-9793-b771c4171a7f\" (UID: \"dbd9b93f-7419-4280-9793-b771c4171a7f\") " Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.077696 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbd9b93f-7419-4280-9793-b771c4171a7f-logs" (OuterVolumeSpecName: "logs") pod "dbd9b93f-7419-4280-9793-b771c4171a7f" (UID: "dbd9b93f-7419-4280-9793-b771c4171a7f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.083193 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbd9b93f-7419-4280-9793-b771c4171a7f-kube-api-access-pg6fn" (OuterVolumeSpecName: "kube-api-access-pg6fn") pod "dbd9b93f-7419-4280-9793-b771c4171a7f" (UID: "dbd9b93f-7419-4280-9793-b771c4171a7f"). InnerVolumeSpecName "kube-api-access-pg6fn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.122656 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dbd9b93f-7419-4280-9793-b771c4171a7f" (UID: "dbd9b93f-7419-4280-9793-b771c4171a7f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.124239 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-config-data" (OuterVolumeSpecName: "config-data") pod "dbd9b93f-7419-4280-9793-b771c4171a7f" (UID: "dbd9b93f-7419-4280-9793-b771c4171a7f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.136484 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "dbd9b93f-7419-4280-9793-b771c4171a7f" (UID: "dbd9b93f-7419-4280-9793-b771c4171a7f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.150927 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "dbd9b93f-7419-4280-9793-b771c4171a7f" (UID: "dbd9b93f-7419-4280-9793-b771c4171a7f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.180226 4941 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.180260 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pg6fn\" (UniqueName: \"kubernetes.io/projected/dbd9b93f-7419-4280-9793-b771c4171a7f-kube-api-access-pg6fn\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.180275 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.180287 4941 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.180297 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dbd9b93f-7419-4280-9793-b771c4171a7f-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.180308 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd9b93f-7419-4280-9793-b771c4171a7f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.231135 4941 generic.go:334] "Generic (PLEG): container finished" podID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerID="b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d" exitCode=143 Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.231246 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b310ca61-cc2e-42ae-9f15-79f8c6f38b46","Type":"ContainerDied","Data":"b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d"} Nov 30 07:08:07 crc 
kubenswrapper[4941]: I1130 07:08:07.234977 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b","Type":"ContainerStarted","Data":"89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4"} Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.235023 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b","Type":"ContainerStarted","Data":"b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b"} Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.237212 4941 generic.go:334] "Generic (PLEG): container finished" podID="dbd9b93f-7419-4280-9793-b771c4171a7f" containerID="f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d" exitCode=0 Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.237235 4941 generic.go:334] "Generic (PLEG): container finished" podID="dbd9b93f-7419-4280-9793-b771c4171a7f" containerID="587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a" exitCode=143 Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.237248 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dbd9b93f-7419-4280-9793-b771c4171a7f","Type":"ContainerDied","Data":"f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d"} Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.237268 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.237291 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dbd9b93f-7419-4280-9793-b771c4171a7f","Type":"ContainerDied","Data":"587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a"} Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.237306 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dbd9b93f-7419-4280-9793-b771c4171a7f","Type":"ContainerDied","Data":"9147e7e649babe4bb8ffd247b93ee8aafc570d3be094af378dc4e475bf23ce3e"} Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.237342 4941 scope.go:117] "RemoveContainer" containerID="f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.264411 4941 scope.go:117] "RemoveContainer" containerID="587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.268759 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.276797 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.296393 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 30 07:08:07 crc kubenswrapper[4941]: E1130 07:08:07.296867 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbd9b93f-7419-4280-9793-b771c4171a7f" containerName="nova-api-log" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.296887 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbd9b93f-7419-4280-9793-b771c4171a7f" containerName="nova-api-log" Nov 30 07:08:07 crc kubenswrapper[4941]: E1130 07:08:07.296897 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbd9b93f-7419-4280-9793-b771c4171a7f" containerName="nova-api-api" Nov 30 07:08:07 crc 
kubenswrapper[4941]: I1130 07:08:07.296904 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbd9b93f-7419-4280-9793-b771c4171a7f" containerName="nova-api-api" Nov 30 07:08:07 crc kubenswrapper[4941]: E1130 07:08:07.296917 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02e23704-d56c-4d7b-81f9-729b38a5c39e" containerName="nova-manage" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.296924 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="02e23704-d56c-4d7b-81f9-729b38a5c39e" containerName="nova-manage" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.297107 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbd9b93f-7419-4280-9793-b771c4171a7f" containerName="nova-api-api" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.297140 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="02e23704-d56c-4d7b-81f9-729b38a5c39e" containerName="nova-manage" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.297157 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbd9b93f-7419-4280-9793-b771c4171a7f" containerName="nova-api-log" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.298269 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.298546 4941 scope.go:117] "RemoveContainer" containerID="f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d" Nov 30 07:08:07 crc kubenswrapper[4941]: E1130 07:08:07.302152 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d\": container with ID starting with f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d not found: ID does not exist" containerID="f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.302200 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d"} err="failed to get container status \"f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d\": rpc error: code = NotFound desc = could not find container \"f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d\": container with ID starting with f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d not found: ID does not exist" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.302225 4941 scope.go:117] "RemoveContainer" containerID="587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.302403 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Nov 30 07:08:07 crc kubenswrapper[4941]: E1130 07:08:07.302913 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a\": container with ID starting with 587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a not found: ID does not exist" containerID="587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.302957 4941 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a"} err="failed to get container status \"587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a\": rpc error: code = NotFound desc = could not find container \"587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a\": container with ID starting with 587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a not found: ID does not exist" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.302984 4941 scope.go:117] "RemoveContainer" containerID="f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.303143 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.303262 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.303579 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d"} err="failed to get container status \"f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d\": rpc error: code = NotFound desc = could not find container \"f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d\": container with ID starting with f030a7952dadc4bcf9b89a3df48e13973c84165ebd43ed09a49dc747f00e964d not found: ID does not exist" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.303614 4941 scope.go:117] "RemoveContainer" containerID="587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.303855 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a"} err="failed to get container status \"587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a\": rpc error: code = NotFound desc = could not find container \"587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a\": container with ID starting with 587a0fbd704ba212c4c2ad30f9f7d5859aa8d5601a46fb5adee4806d910e4e0a not found: ID does not exist" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.311438 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.485183 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6k5mm\" (UniqueName: \"kubernetes.io/projected/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-kube-api-access-6k5mm\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.485313 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-public-tls-certs\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.485385 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-config-data\") pod \"nova-api-0\" (UID: 
\"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.485438 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-logs\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.485484 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.485526 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.552518 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbd9b93f-7419-4280-9793-b771c4171a7f" path="/var/lib/kubelet/pods/dbd9b93f-7419-4280-9793-b771c4171a7f/volumes" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.554644 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.587298 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-config-data\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.587382 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-logs\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.587410 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.587448 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.587544 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6k5mm\" (UniqueName: \"kubernetes.io/projected/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-kube-api-access-6k5mm\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.587630 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-public-tls-certs\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.587897 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-logs\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.591177 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-config-data\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.594162 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.594733 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-public-tls-certs\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.594782 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.613102 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6k5mm\" (UniqueName: \"kubernetes.io/projected/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-kube-api-access-6k5mm\") pod \"nova-api-0\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " pod="openstack/nova-api-0" Nov 30 07:08:07 crc kubenswrapper[4941]: I1130 07:08:07.626796 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:08:08 crc kubenswrapper[4941]: E1130 07:08:08.092350 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 30 07:08:08 crc kubenswrapper[4941]: E1130 07:08:08.094009 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 30 07:08:08 crc kubenswrapper[4941]: E1130 07:08:08.096597 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 30 07:08:08 crc kubenswrapper[4941]: E1130 07:08:08.096630 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="62484167-fde9-4dcf-9363-ae6eb0a12d3c" containerName="nova-scheduler-scheduler" Nov 30 07:08:08 crc kubenswrapper[4941]: I1130 07:08:08.168859 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:08:08 crc kubenswrapper[4941]: W1130 07:08:08.169517 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4571a7ec_45e1_4c4b_a96a_b9841b3d89bc.slice/crio-2ab8ea4ffd71c0b3f30bcce75240082e341bcc1790224dff20c8c1d510c7b505 WatchSource:0}: Error finding container 2ab8ea4ffd71c0b3f30bcce75240082e341bcc1790224dff20c8c1d510c7b505: Status 404 returned error can't find the container with id 2ab8ea4ffd71c0b3f30bcce75240082e341bcc1790224dff20c8c1d510c7b505 Nov 30 07:08:08 crc kubenswrapper[4941]: I1130 07:08:08.255165 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc","Type":"ContainerStarted","Data":"2ab8ea4ffd71c0b3f30bcce75240082e341bcc1790224dff20c8c1d510c7b505"} Nov 30 07:08:09 crc kubenswrapper[4941]: I1130 07:08:09.274987 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b","Type":"ContainerStarted","Data":"c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625"} Nov 30 07:08:09 crc kubenswrapper[4941]: I1130 07:08:09.275689 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 30 07:08:09 crc kubenswrapper[4941]: I1130 07:08:09.279583 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc","Type":"ContainerStarted","Data":"b77426d5a27eb3609eea859e72da0ab57c475fc3ec836c9ba0a47eb046b015a1"} Nov 30 07:08:09 crc kubenswrapper[4941]: I1130 07:08:09.279635 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc","Type":"ContainerStarted","Data":"a9753f5333e8cf6a6a6e8a5b6bb203dcea93cf7b8757984a96698adeffa9d226"} Nov 30 07:08:09 crc kubenswrapper[4941]: I1130 07:08:09.315042 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.223225972 podStartE2EDuration="5.314997642s" podCreationTimestamp="2025-11-30 07:08:04 +0000 UTC" firstStartedPulling="2025-11-30 07:08:05.077597224 +0000 UTC m=+1305.845768833" lastFinishedPulling="2025-11-30 07:08:08.169368894 +0000 UTC m=+1308.937540503" observedRunningTime="2025-11-30 07:08:09.310862756 +0000 UTC m=+1310.079034395" watchObservedRunningTime="2025-11-30 07:08:09.314997642 +0000 UTC m=+1310.083169271" Nov 30 07:08:09 crc kubenswrapper[4941]: I1130 07:08:09.338028 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.338000475 podStartE2EDuration="2.338000475s" podCreationTimestamp="2025-11-30 07:08:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:08:09.327247066 +0000 UTC m=+1310.095418745" watchObservedRunningTime="2025-11-30 07:08:09.338000475 +0000 UTC m=+1310.106172125" Nov 30 07:08:09 crc kubenswrapper[4941]: I1130 07:08:09.595798 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.186:8775/\": read tcp 10.217.0.2:51388->10.217.0.186:8775: read: connection reset by peer" Nov 30 07:08:09 crc kubenswrapper[4941]: I1130 07:08:09.595853 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.186:8775/\": read tcp 10.217.0.2:51396->10.217.0.186:8775: read: connection reset by peer" Nov 30 07:08:09 crc kubenswrapper[4941]: I1130 07:08:09.994616 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.147535 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-combined-ca-bundle\") pod \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.147582 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w97f8\" (UniqueName: \"kubernetes.io/projected/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-kube-api-access-w97f8\") pod \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.147602 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-logs\") pod \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.147620 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-config-data\") pod \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.147820 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-nova-metadata-tls-certs\") pod \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\" (UID: \"b310ca61-cc2e-42ae-9f15-79f8c6f38b46\") " Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.149499 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-logs" (OuterVolumeSpecName: "logs") pod "b310ca61-cc2e-42ae-9f15-79f8c6f38b46" (UID: "b310ca61-cc2e-42ae-9f15-79f8c6f38b46"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.154228 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-kube-api-access-w97f8" (OuterVolumeSpecName: "kube-api-access-w97f8") pod "b310ca61-cc2e-42ae-9f15-79f8c6f38b46" (UID: "b310ca61-cc2e-42ae-9f15-79f8c6f38b46"). InnerVolumeSpecName "kube-api-access-w97f8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.182805 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b310ca61-cc2e-42ae-9f15-79f8c6f38b46" (UID: "b310ca61-cc2e-42ae-9f15-79f8c6f38b46"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.187307 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-config-data" (OuterVolumeSpecName: "config-data") pod "b310ca61-cc2e-42ae-9f15-79f8c6f38b46" (UID: "b310ca61-cc2e-42ae-9f15-79f8c6f38b46"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.214355 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "b310ca61-cc2e-42ae-9f15-79f8c6f38b46" (UID: "b310ca61-cc2e-42ae-9f15-79f8c6f38b46"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.249930 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.249965 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.249978 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w97f8\" (UniqueName: \"kubernetes.io/projected/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-kube-api-access-w97f8\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.249993 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.250002 4941 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b310ca61-cc2e-42ae-9f15-79f8c6f38b46-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.290228 4941 generic.go:334] "Generic (PLEG): container finished" podID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerID="18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f" exitCode=0 Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.291314 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.295726 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b310ca61-cc2e-42ae-9f15-79f8c6f38b46","Type":"ContainerDied","Data":"18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f"} Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.295781 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b310ca61-cc2e-42ae-9f15-79f8c6f38b46","Type":"ContainerDied","Data":"62b0f017272706249ee9e24691957f01c744f70be312aec01dd64ad948cf47b9"} Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.295803 4941 scope.go:117] "RemoveContainer" containerID="18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.324560 4941 scope.go:117] "RemoveContainer" containerID="b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.336997 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.349966 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.352036 4941 scope.go:117] "RemoveContainer" containerID="18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f" Nov 30 07:08:10 crc kubenswrapper[4941]: E1130 07:08:10.355594 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f\": container with ID starting with 18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f not found: ID does not exist" containerID="18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.355653 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f"} err="failed to get container status \"18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f\": rpc error: code = NotFound desc = could not find container \"18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f\": container with ID starting with 18175162828c19d7d3fb2249ef0b14af4014a5a6cee668a9e6d396dadc2a989f not found: ID does not exist" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.355691 4941 scope.go:117] "RemoveContainer" containerID="b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d" Nov 30 07:08:10 crc kubenswrapper[4941]: E1130 07:08:10.356050 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d\": container with ID starting with b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d not found: ID does not exist" containerID="b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d" Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.356092 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d"} err="failed to get container status \"b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d\": rpc error: code = 
NotFound desc = could not find container \"b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d\": container with ID starting with b0e62ff4aec3f75bb7cfc65f5af82a093debe70eefbaee4c2482d39ecf9c532d not found: ID does not exist"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.374359 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 07:08:10 crc kubenswrapper[4941]: E1130 07:08:10.374754 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerName="nova-metadata-metadata"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.374772 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerName="nova-metadata-metadata"
Nov 30 07:08:10 crc kubenswrapper[4941]: E1130 07:08:10.374788 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerName="nova-metadata-log"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.374795 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerName="nova-metadata-log"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.374969 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerName="nova-metadata-log"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.374994 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" containerName="nova-metadata-metadata"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.375936 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.381566 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.382317 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.382483 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.554195 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szdql\" (UniqueName: \"kubernetes.io/projected/3315d9fd-71da-4f22-98d8-7142da896aab-kube-api-access-szdql\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.554255 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-config-data\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.554296 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3315d9fd-71da-4f22-98d8-7142da896aab-logs\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.554359 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.554386 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.656295 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szdql\" (UniqueName: \"kubernetes.io/projected/3315d9fd-71da-4f22-98d8-7142da896aab-kube-api-access-szdql\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.656383 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-config-data\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.656435 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3315d9fd-71da-4f22-98d8-7142da896aab-logs\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.656504 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.656536 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.658143 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3315d9fd-71da-4f22-98d8-7142da896aab-logs\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.660703 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.660860 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-config-data\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.662047 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.678285 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szdql\" (UniqueName: \"kubernetes.io/projected/3315d9fd-71da-4f22-98d8-7142da896aab-kube-api-access-szdql\") pod \"nova-metadata-0\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " pod="openstack/nova-metadata-0"
Nov 30 07:08:10 crc kubenswrapper[4941]: I1130 07:08:10.712021 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 30 07:08:11 crc kubenswrapper[4941]: I1130 07:08:11.140758 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 07:08:11 crc kubenswrapper[4941]: W1130 07:08:11.145976 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3315d9fd_71da_4f22_98d8_7142da896aab.slice/crio-4d3514728f85901cd14b4e4daaaf405eb1384a94683b10d90ca7c50764b4cfbf WatchSource:0}: Error finding container 4d3514728f85901cd14b4e4daaaf405eb1384a94683b10d90ca7c50764b4cfbf: Status 404 returned error can't find the container with id 4d3514728f85901cd14b4e4daaaf405eb1384a94683b10d90ca7c50764b4cfbf
Nov 30 07:08:11 crc kubenswrapper[4941]: I1130 07:08:11.298983 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3315d9fd-71da-4f22-98d8-7142da896aab","Type":"ContainerStarted","Data":"4d3514728f85901cd14b4e4daaaf405eb1384a94683b10d90ca7c50764b4cfbf"}
Nov 30 07:08:11 crc kubenswrapper[4941]: I1130 07:08:11.534708 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b310ca61-cc2e-42ae-9f15-79f8c6f38b46" path="/var/lib/kubelet/pods/b310ca61-cc2e-42ae-9f15-79f8c6f38b46/volumes"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.163427 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.292271 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlgnd\" (UniqueName: \"kubernetes.io/projected/62484167-fde9-4dcf-9363-ae6eb0a12d3c-kube-api-access-hlgnd\") pod \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\" (UID: \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\") "
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.292511 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62484167-fde9-4dcf-9363-ae6eb0a12d3c-config-data\") pod \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\" (UID: \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\") "
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.292555 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62484167-fde9-4dcf-9363-ae6eb0a12d3c-combined-ca-bundle\") pod \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\" (UID: \"62484167-fde9-4dcf-9363-ae6eb0a12d3c\") "
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.299642 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62484167-fde9-4dcf-9363-ae6eb0a12d3c-kube-api-access-hlgnd" (OuterVolumeSpecName: "kube-api-access-hlgnd") pod "62484167-fde9-4dcf-9363-ae6eb0a12d3c" (UID: "62484167-fde9-4dcf-9363-ae6eb0a12d3c"). InnerVolumeSpecName "kube-api-access-hlgnd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.318156 4941 generic.go:334] "Generic (PLEG): container finished" podID="62484167-fde9-4dcf-9363-ae6eb0a12d3c" containerID="7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f" exitCode=0
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.318265 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"62484167-fde9-4dcf-9363-ae6eb0a12d3c","Type":"ContainerDied","Data":"7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f"}
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.318367 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"62484167-fde9-4dcf-9363-ae6eb0a12d3c","Type":"ContainerDied","Data":"6ef2da646941c64fdfce0fc1bd0e5b9f598ce2f46f6fc8a3e4b9ca624e87b18e"}
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.318401 4941 scope.go:117] "RemoveContainer" containerID="7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.318485 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.328741 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3315d9fd-71da-4f22-98d8-7142da896aab","Type":"ContainerStarted","Data":"fd044d0caf33128dc424d871cee00e10115231eb3535e2249f4d39b8c3c04af1"}
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.328782 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3315d9fd-71da-4f22-98d8-7142da896aab","Type":"ContainerStarted","Data":"80eaa1c0bcf74c714669821c2f8c3b37c333106eca38a0ef24d09c0faa3ec4c0"}
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.334144 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62484167-fde9-4dcf-9363-ae6eb0a12d3c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62484167-fde9-4dcf-9363-ae6eb0a12d3c" (UID: "62484167-fde9-4dcf-9363-ae6eb0a12d3c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.361683 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62484167-fde9-4dcf-9363-ae6eb0a12d3c-config-data" (OuterVolumeSpecName: "config-data") pod "62484167-fde9-4dcf-9363-ae6eb0a12d3c" (UID: "62484167-fde9-4dcf-9363-ae6eb0a12d3c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.384679 4941 scope.go:117] "RemoveContainer" containerID="7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f"
Nov 30 07:08:12 crc kubenswrapper[4941]: E1130 07:08:12.385206 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f\": container with ID starting with 7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f not found: ID does not exist" containerID="7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.385261 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f"} err="failed to get container status \"7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f\": rpc error: code = NotFound desc = could not find container \"7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f\": container with ID starting with 7801f559fb3a14ceb52fe81a9ce23adb9dacfe9ee7185402f6d99cee2a365a5f not found: ID does not exist"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.394484 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62484167-fde9-4dcf-9363-ae6eb0a12d3c-config-data\") on node \"crc\" DevicePath \"\""
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.394509 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62484167-fde9-4dcf-9363-ae6eb0a12d3c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.394521 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlgnd\" (UniqueName: \"kubernetes.io/projected/62484167-fde9-4dcf-9363-ae6eb0a12d3c-kube-api-access-hlgnd\") on node \"crc\" DevicePath \"\""
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.646097 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.646082025 podStartE2EDuration="2.646082025s" podCreationTimestamp="2025-11-30 07:08:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:08:12.354137023 +0000 UTC m=+1313.122308662" watchObservedRunningTime="2025-11-30 07:08:12.646082025 +0000 UTC m=+1313.414253634"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.652511 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.665954 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.680725 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Nov 30 07:08:12 crc kubenswrapper[4941]: E1130 07:08:12.681202 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62484167-fde9-4dcf-9363-ae6eb0a12d3c" containerName="nova-scheduler-scheduler"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.681219 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="62484167-fde9-4dcf-9363-ae6eb0a12d3c" containerName="nova-scheduler-scheduler"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.681495 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="62484167-fde9-4dcf-9363-ae6eb0a12d3c" containerName="nova-scheduler-scheduler"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.682179 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.685297 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.693736 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.803001 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ac7adf-c7e3-4512-9c65-6361d005b4b7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\") " pod="openstack/nova-scheduler-0"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.803501 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2vs7\" (UniqueName: \"kubernetes.io/projected/14ac7adf-c7e3-4512-9c65-6361d005b4b7-kube-api-access-n2vs7\") pod \"nova-scheduler-0\" (UID: \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\") " pod="openstack/nova-scheduler-0"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.804074 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ac7adf-c7e3-4512-9c65-6361d005b4b7-config-data\") pod \"nova-scheduler-0\" (UID: \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\") " pod="openstack/nova-scheduler-0"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.906267 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ac7adf-c7e3-4512-9c65-6361d005b4b7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\") " pod="openstack/nova-scheduler-0"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.906709 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2vs7\" (UniqueName: \"kubernetes.io/projected/14ac7adf-c7e3-4512-9c65-6361d005b4b7-kube-api-access-n2vs7\") pod \"nova-scheduler-0\" (UID: \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\") " pod="openstack/nova-scheduler-0"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.906895 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ac7adf-c7e3-4512-9c65-6361d005b4b7-config-data\") pod \"nova-scheduler-0\" (UID: \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\") " pod="openstack/nova-scheduler-0"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.911141 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ac7adf-c7e3-4512-9c65-6361d005b4b7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\") " pod="openstack/nova-scheduler-0"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.912024 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ac7adf-c7e3-4512-9c65-6361d005b4b7-config-data\") pod \"nova-scheduler-0\" (UID: \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\") " pod="openstack/nova-scheduler-0"
Nov 30 07:08:12 crc kubenswrapper[4941]: I1130 07:08:12.934095 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2vs7\" (UniqueName: \"kubernetes.io/projected/14ac7adf-c7e3-4512-9c65-6361d005b4b7-kube-api-access-n2vs7\") pod \"nova-scheduler-0\" (UID: \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\") " pod="openstack/nova-scheduler-0"
Nov 30 07:08:13 crc kubenswrapper[4941]: I1130 07:08:13.033872 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 30 07:08:13 crc kubenswrapper[4941]: I1130 07:08:13.533273 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62484167-fde9-4dcf-9363-ae6eb0a12d3c" path="/var/lib/kubelet/pods/62484167-fde9-4dcf-9363-ae6eb0a12d3c/volumes"
Nov 30 07:08:13 crc kubenswrapper[4941]: I1130 07:08:13.655404 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 30 07:08:13 crc kubenswrapper[4941]: W1130 07:08:13.656191 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14ac7adf_c7e3_4512_9c65_6361d005b4b7.slice/crio-98b31671fa8fd66e9e61127127016a8c4b60b1daedaf1033ccb1f0f97ad880fd WatchSource:0}: Error finding container 98b31671fa8fd66e9e61127127016a8c4b60b1daedaf1033ccb1f0f97ad880fd: Status 404 returned error can't find the container with id 98b31671fa8fd66e9e61127127016a8c4b60b1daedaf1033ccb1f0f97ad880fd
Nov 30 07:08:14 crc kubenswrapper[4941]: I1130 07:08:14.359634 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"14ac7adf-c7e3-4512-9c65-6361d005b4b7","Type":"ContainerStarted","Data":"370789e569c2ad6cc7a3fb517040dd5ea003437e5aef35484b9575972bc086d9"}
Nov 30 07:08:14 crc kubenswrapper[4941]: I1130 07:08:14.359688 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"14ac7adf-c7e3-4512-9c65-6361d005b4b7","Type":"ContainerStarted","Data":"98b31671fa8fd66e9e61127127016a8c4b60b1daedaf1033ccb1f0f97ad880fd"}
Nov 30 07:08:14 crc kubenswrapper[4941]: I1130 07:08:14.381787 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.381765404 podStartE2EDuration="2.381765404s" podCreationTimestamp="2025-11-30 07:08:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:08:14.374315677 +0000 UTC m=+1315.142487286" watchObservedRunningTime="2025-11-30 07:08:14.381765404 +0000 UTC m=+1315.149937023"
Nov 30 07:08:15 crc kubenswrapper[4941]: I1130 07:08:15.715548 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Nov 30 07:08:15 crc kubenswrapper[4941]: I1130 07:08:15.717248 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Nov 30 07:08:17 crc kubenswrapper[4941]: I1130 07:08:17.627375 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Nov 30 07:08:17 crc kubenswrapper[4941]: I1130 07:08:17.627614 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Nov 30 07:08:18 crc kubenswrapper[4941]: I1130 07:08:18.034950 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Nov 30 07:08:18 crc kubenswrapper[4941]: I1130 07:08:18.643754 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Nov 30 07:08:18 crc kubenswrapper[4941]: I1130 07:08:18.643769 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Nov 30 07:08:20 crc kubenswrapper[4941]: I1130 07:08:20.712855 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Nov 30 07:08:20 crc kubenswrapper[4941]: I1130 07:08:20.712909 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Nov 30 07:08:21 crc kubenswrapper[4941]: I1130 07:08:21.725574 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Nov 30 07:08:21 crc kubenswrapper[4941]: I1130 07:08:21.725616 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Nov 30 07:08:23 crc kubenswrapper[4941]: I1130 07:08:23.034814 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Nov 30 07:08:23 crc kubenswrapper[4941]: I1130 07:08:23.069602 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Nov 30 07:08:23 crc kubenswrapper[4941]: I1130 07:08:23.494406 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Nov 30 07:08:27 crc kubenswrapper[4941]: I1130 07:08:27.634412 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Nov 30 07:08:27 crc kubenswrapper[4941]: I1130 07:08:27.634976 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Nov 30 07:08:27 crc kubenswrapper[4941]: I1130 07:08:27.635784 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Nov 30 07:08:27 crc kubenswrapper[4941]: I1130 07:08:27.636111 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Nov 30 07:08:27 crc kubenswrapper[4941]: I1130 07:08:27.642347 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Nov 30 07:08:27 crc kubenswrapper[4941]: I1130 07:08:27.643507 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Nov 30 07:08:30 crc kubenswrapper[4941]: I1130 07:08:30.718058 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Nov 30 07:08:30 crc kubenswrapper[4941]: I1130 07:08:30.718203 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Nov 30 07:08:30 crc kubenswrapper[4941]: I1130 07:08:30.721442 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Nov 30 07:08:30 crc kubenswrapper[4941]: I1130 07:08:30.723579 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Nov 30 07:08:34 crc kubenswrapper[4941]: I1130 07:08:34.610344 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Nov 30 07:08:56 crc kubenswrapper[4941]: I1130 07:08:56.853733 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Nov 30 07:08:56 crc kubenswrapper[4941]: I1130 07:08:56.875591 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="5679f4ed-6882-4f85-93b2-02ccff357b48" containerName="openstackclient" containerID="cri-o://7f998986b8950e234b7bc2a24fd3e535f40df67a3670f9a085fdfa035b9598de" gracePeriod=2
Nov 30 07:08:56 crc kubenswrapper[4941]: I1130 07:08:56.894882 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Nov 30 07:08:56 crc kubenswrapper[4941]: I1130 07:08:56.967589 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder43da-account-delete-8nqxg"]
Nov 30 07:08:56 crc kubenswrapper[4941]: E1130 07:08:56.968063 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5679f4ed-6882-4f85-93b2-02ccff357b48" containerName="openstackclient"
Nov 30 07:08:56 crc kubenswrapper[4941]: I1130 07:08:56.968087 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="5679f4ed-6882-4f85-93b2-02ccff357b48" containerName="openstackclient"
Nov 30 07:08:56 crc kubenswrapper[4941]: I1130 07:08:56.968304 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="5679f4ed-6882-4f85-93b2-02ccff357b48" containerName="openstackclient"
Nov 30 07:08:56 crc kubenswrapper[4941]: I1130 07:08:56.969040 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder43da-account-delete-8nqxg"
Nov 30 07:08:56 crc kubenswrapper[4941]: I1130 07:08:56.998099 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Nov 30 07:08:56 crc kubenswrapper[4941]: I1130 07:08:56.998423 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="ea6e32f6-a5d9-4b23-9588-2ea6be572e72" containerName="openstack-network-exporter" containerID="cri-o://e2122d103d04cc614de8738002d8a386052f20ec6347c472adc19bc507389c00" gracePeriod=300
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.024857 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder43da-account-delete-8nqxg"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.053389 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-5zxfl"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.072519 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7jbx\" (UniqueName: \"kubernetes.io/projected/c4c68094-a8b8-4327-9ae1-335226d3b938-kube-api-access-f7jbx\") pod \"cinder43da-account-delete-8nqxg\" (UID: \"c4c68094-a8b8-4327-9ae1-335226d3b938\") " pod="openstack/cinder43da-account-delete-8nqxg"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.072576 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4c68094-a8b8-4327-9ae1-335226d3b938-operator-scripts\") pod \"cinder43da-account-delete-8nqxg\" (UID: \"c4c68094-a8b8-4327-9ae1-335226d3b938\") " pod="openstack/cinder43da-account-delete-8nqxg"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.084368 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-5zxfl"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.132392 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.133072 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="14ce638b-1621-451a-80b6-0e13b6ffb734" containerName="openstack-network-exporter" containerID="cri-o://d012ac9e18256da2434ecdcf48894a12178c369b643c9b30f25771ee1c4fba92" gracePeriod=300
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.160236 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement6f8e-account-delete-sd7xt"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.161593 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement6f8e-account-delete-sd7xt"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.175907 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7jbx\" (UniqueName: \"kubernetes.io/projected/c4c68094-a8b8-4327-9ae1-335226d3b938-kube-api-access-f7jbx\") pod \"cinder43da-account-delete-8nqxg\" (UID: \"c4c68094-a8b8-4327-9ae1-335226d3b938\") " pod="openstack/cinder43da-account-delete-8nqxg"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.175953 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4c68094-a8b8-4327-9ae1-335226d3b938-operator-scripts\") pod \"cinder43da-account-delete-8nqxg\" (UID: \"c4c68094-a8b8-4327-9ae1-335226d3b938\") " pod="openstack/cinder43da-account-delete-8nqxg"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.176656 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4c68094-a8b8-4327-9ae1-335226d3b938-operator-scripts\") pod \"cinder43da-account-delete-8nqxg\" (UID: \"c4c68094-a8b8-4327-9ae1-335226d3b938\") " pod="openstack/cinder43da-account-delete-8nqxg"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.181393 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement6f8e-account-delete-sd7xt"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.210387 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.227043 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican75ae-account-delete-7krwx"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.241675 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican75ae-account-delete-7krwx"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.257741 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7jbx\" (UniqueName: \"kubernetes.io/projected/c4c68094-a8b8-4327-9ae1-335226d3b938-kube-api-access-f7jbx\") pod \"cinder43da-account-delete-8nqxg\" (UID: \"c4c68094-a8b8-4327-9ae1-335226d3b938\") " pod="openstack/cinder43da-account-delete-8nqxg"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.279557 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bb6c559-5c94-43b0-b6f0-3992652c720f-operator-scripts\") pod \"barbican75ae-account-delete-7krwx\" (UID: \"1bb6c559-5c94-43b0-b6f0-3992652c720f\") " pod="openstack/barbican75ae-account-delete-7krwx"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.279719 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5q4b\" (UniqueName: \"kubernetes.io/projected/4f0e9278-1a7f-400f-8a07-31f88c84814b-kube-api-access-c5q4b\") pod \"placement6f8e-account-delete-sd7xt\" (UID: \"4f0e9278-1a7f-400f-8a07-31f88c84814b\") " pod="openstack/placement6f8e-account-delete-sd7xt"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.279754 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f0e9278-1a7f-400f-8a07-31f88c84814b-operator-scripts\") pod \"placement6f8e-account-delete-sd7xt\" (UID: \"4f0e9278-1a7f-400f-8a07-31f88c84814b\") " pod="openstack/placement6f8e-account-delete-sd7xt"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.279802 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp9m8\" (UniqueName: \"kubernetes.io/projected/1bb6c559-5c94-43b0-b6f0-3992652c720f-kube-api-access-rp9m8\") pod \"barbican75ae-account-delete-7krwx\" (UID: \"1bb6c559-5c94-43b0-b6f0-3992652c720f\") " pod="openstack/barbican75ae-account-delete-7krwx"
Nov 30 07:08:57 crc kubenswrapper[4941]: E1130 07:08:57.280934 4941 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Nov 30 07:08:57 crc kubenswrapper[4941]: E1130 07:08:57.280979 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data podName:45978317-0f07-44da-8b74-fbaaec0e6105 nodeName:}" failed. No retries permitted until 2025-11-30 07:08:57.780965158 +0000 UTC m=+1358.549136767 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data") pod "rabbitmq-cell1-server-0" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105") : configmap "rabbitmq-cell1-config-data" not found
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.288718 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican75ae-account-delete-7krwx"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.299741 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder43da-account-delete-8nqxg"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.382380 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutrona2cc-account-delete-262pl"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.383598 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp9m8\" (UniqueName: \"kubernetes.io/projected/1bb6c559-5c94-43b0-b6f0-3992652c720f-kube-api-access-rp9m8\") pod \"barbican75ae-account-delete-7krwx\" (UID: \"1bb6c559-5c94-43b0-b6f0-3992652c720f\") " pod="openstack/barbican75ae-account-delete-7krwx"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.383634 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutrona2cc-account-delete-262pl"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.383678 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bb6c559-5c94-43b0-b6f0-3992652c720f-operator-scripts\") pod \"barbican75ae-account-delete-7krwx\" (UID: \"1bb6c559-5c94-43b0-b6f0-3992652c720f\") " pod="openstack/barbican75ae-account-delete-7krwx"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.383782 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5q4b\" (UniqueName: \"kubernetes.io/projected/4f0e9278-1a7f-400f-8a07-31f88c84814b-kube-api-access-c5q4b\") pod \"placement6f8e-account-delete-sd7xt\" (UID: \"4f0e9278-1a7f-400f-8a07-31f88c84814b\") " pod="openstack/placement6f8e-account-delete-sd7xt"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.383817 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f0e9278-1a7f-400f-8a07-31f88c84814b-operator-scripts\") pod \"placement6f8e-account-delete-sd7xt\" (UID: \"4f0e9278-1a7f-400f-8a07-31f88c84814b\") " pod="openstack/placement6f8e-account-delete-sd7xt"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.383597 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="ea6e32f6-a5d9-4b23-9588-2ea6be572e72" containerName="ovsdbserver-nb" containerID="cri-o://eba3ae86f525c89da8b2e9bcc6b9bd66ffd75c1417b2bfa7df563071df5d0c63" gracePeriod=300
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.384498 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f0e9278-1a7f-400f-8a07-31f88c84814b-operator-scripts\") pod \"placement6f8e-account-delete-sd7xt\" (UID: \"4f0e9278-1a7f-400f-8a07-31f88c84814b\") " pod="openstack/placement6f8e-account-delete-sd7xt"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.385035 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bb6c559-5c94-43b0-b6f0-3992652c720f-operator-scripts\") pod \"barbican75ae-account-delete-7krwx\" (UID: \"1bb6c559-5c94-43b0-b6f0-3992652c720f\") " pod="openstack/barbican75ae-account-delete-7krwx"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.410103 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutrona2cc-account-delete-262pl"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.428510 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="14ce638b-1621-451a-80b6-0e13b6ffb734" containerName="ovsdbserver-sb" containerID="cri-o://07f994f9e1c2fa25bd24a1b436f837eb006877e11013a589a34bf028d013c942" gracePeriod=300
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.476930 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp9m8\" (UniqueName: \"kubernetes.io/projected/1bb6c559-5c94-43b0-b6f0-3992652c720f-kube-api-access-rp9m8\") pod \"barbican75ae-account-delete-7krwx\" (UID: \"1bb6c559-5c94-43b0-b6f0-3992652c720f\") " pod="openstack/barbican75ae-account-delete-7krwx"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.485905 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bad7c3f7-8cec-4baf-808a-43184771d1da-operator-scripts\") pod \"neutrona2cc-account-delete-262pl\" (UID: \"bad7c3f7-8cec-4baf-808a-43184771d1da\") " pod="openstack/neutrona2cc-account-delete-262pl"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.486079 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmnl6\" (UniqueName: \"kubernetes.io/projected/bad7c3f7-8cec-4baf-808a-43184771d1da-kube-api-access-fmnl6\") pod \"neutrona2cc-account-delete-262pl\" (UID: \"bad7c3f7-8cec-4baf-808a-43184771d1da\") " pod="openstack/neutrona2cc-account-delete-262pl"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.487301 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5q4b\" (UniqueName: \"kubernetes.io/projected/4f0e9278-1a7f-400f-8a07-31f88c84814b-kube-api-access-c5q4b\") pod \"placement6f8e-account-delete-sd7xt\" (UID: \"4f0e9278-1a7f-400f-8a07-31f88c84814b\") " pod="openstack/placement6f8e-account-delete-sd7xt"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.502009 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glancec0c5-account-delete-fx6st"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.503758 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glancec0c5-account-delete-fx6st"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.507825 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican75ae-account-delete-7krwx"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.533825 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement6f8e-account-delete-sd7xt"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.541909 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ee0724a-81f2-4b40-959f-e831f4d9abf5" path="/var/lib/kubelet/pods/2ee0724a-81f2-4b40-959f-e831f4d9abf5/volumes"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.543250 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glancec0c5-account-delete-fx6st"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.555439 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.588850 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmnl6\" (UniqueName: \"kubernetes.io/projected/bad7c3f7-8cec-4baf-808a-43184771d1da-kube-api-access-fmnl6\") pod \"neutrona2cc-account-delete-262pl\" (UID: \"bad7c3f7-8cec-4baf-808a-43184771d1da\") " pod="openstack/neutrona2cc-account-delete-262pl"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.588923 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bad7c3f7-8cec-4baf-808a-43184771d1da-operator-scripts\") pod \"neutrona2cc-account-delete-262pl\" (UID: \"bad7c3f7-8cec-4baf-808a-43184771d1da\") " pod="openstack/neutrona2cc-account-delete-262pl"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.589768 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bad7c3f7-8cec-4baf-808a-43184771d1da-operator-scripts\") pod \"neutrona2cc-account-delete-262pl\" (UID: \"bad7c3f7-8cec-4baf-808a-43184771d1da\") " pod="openstack/neutrona2cc-account-delete-262pl"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.611360 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-jlmlm"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.683379 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmnl6\" (UniqueName: \"kubernetes.io/projected/bad7c3f7-8cec-4baf-808a-43184771d1da-kube-api-access-fmnl6\") pod \"neutrona2cc-account-delete-262pl\" (UID: \"bad7c3f7-8cec-4baf-808a-43184771d1da\") " pod="openstack/neutrona2cc-account-delete-262pl"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.693592 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxfdd\" (UniqueName: \"kubernetes.io/projected/d229a913-5522-4197-be77-fad9a0187f74-kube-api-access-gxfdd\") pod \"glancec0c5-account-delete-fx6st\" (UID: \"d229a913-5522-4197-be77-fad9a0187f74\") " pod="openstack/glancec0c5-account-delete-fx6st"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.693685 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d229a913-5522-4197-be77-fad9a0187f74-operator-scripts\") pod \"glancec0c5-account-delete-fx6st\" (UID: \"d229a913-5522-4197-be77-fad9a0187f74\") " pod="openstack/glancec0c5-account-delete-fx6st"
Nov 30 07:08:57 crc kubenswrapper[4941]: E1130 07:08:57.698062 4941 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Nov 30 07:08:57 crc kubenswrapper[4941]: E1130 07:08:57.698116 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data podName:4e7a5ee5-1f0c-4819-a375-891a5e2cea03 nodeName:}" failed. No retries permitted until 2025-11-30 07:08:58.198102375 +0000 UTC m=+1358.966273984 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data") pod "rabbitmq-server-0" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03") : configmap "rabbitmq-config-data" not found
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.703789 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-slfpx"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.743821 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-jlmlm"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.784019 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-rwxvw"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.785716 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-rwxvw" podUID="94cbff61-3614-4efd-b4ba-36bef65f2ae7" containerName="openstack-network-exporter" containerID="cri-o://c0d359c73119f6c2eb67ed8df045fe124ba61df2f455244a61a488956e3a2e34" gracePeriod=30
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.794952 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxfdd\" (UniqueName: \"kubernetes.io/projected/d229a913-5522-4197-be77-fad9a0187f74-kube-api-access-gxfdd\") pod \"glancec0c5-account-delete-fx6st\" (UID: \"d229a913-5522-4197-be77-fad9a0187f74\") " pod="openstack/glancec0c5-account-delete-fx6st"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.795013 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d229a913-5522-4197-be77-fad9a0187f74-operator-scripts\") pod \"glancec0c5-account-delete-fx6st\" (UID: \"d229a913-5522-4197-be77-fad9a0187f74\") " pod="openstack/glancec0c5-account-delete-fx6st"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.795761 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d229a913-5522-4197-be77-fad9a0187f74-operator-scripts\") pod \"glancec0c5-account-delete-fx6st\" (UID: \"d229a913-5522-4197-be77-fad9a0187f74\") " pod="openstack/glancec0c5-account-delete-fx6st"
Nov 30 07:08:57 crc kubenswrapper[4941]: E1130 07:08:57.795817 4941 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Nov 30 07:08:57 crc kubenswrapper[4941]: E1130 07:08:57.795855 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data podName:45978317-0f07-44da-8b74-fbaaec0e6105 nodeName:}" failed. No retries permitted until 2025-11-30 07:08:58.795843921 +0000 UTC m=+1359.564015530 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data") pod "rabbitmq-cell1-server-0" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105") : configmap "rabbitmq-cell1-config-data" not found
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.830051 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-lknbl"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.844795 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxfdd\" (UniqueName: \"kubernetes.io/projected/d229a913-5522-4197-be77-fad9a0187f74-kube-api-access-gxfdd\") pod \"glancec0c5-account-delete-fx6st\" (UID: \"d229a913-5522-4197-be77-fad9a0187f74\") " pod="openstack/glancec0c5-account-delete-fx6st"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.845218 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutrona2cc-account-delete-262pl"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.901050 4941 generic.go:334] "Generic (PLEG): container finished" podID="ea6e32f6-a5d9-4b23-9588-2ea6be572e72" containerID="e2122d103d04cc614de8738002d8a386052f20ec6347c472adc19bc507389c00" exitCode=2
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.901187 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ea6e32f6-a5d9-4b23-9588-2ea6be572e72","Type":"ContainerDied","Data":"e2122d103d04cc614de8738002d8a386052f20ec6347c472adc19bc507389c00"}
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.907761 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-lknbl"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.938845 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_14ce638b-1621-451a-80b6-0e13b6ffb734/ovsdbserver-sb/0.log"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.938893 4941 generic.go:334] "Generic (PLEG): container finished" podID="14ce638b-1621-451a-80b6-0e13b6ffb734" containerID="d012ac9e18256da2434ecdcf48894a12178c369b643c9b30f25771ee1c4fba92" exitCode=2
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.938913 4941 generic.go:334] "Generic (PLEG): container finished" podID="14ce638b-1621-451a-80b6-0e13b6ffb734" containerID="07f994f9e1c2fa25bd24a1b436f837eb006877e11013a589a34bf028d013c942" exitCode=143
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.938936 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"14ce638b-1621-451a-80b6-0e13b6ffb734","Type":"ContainerDied","Data":"d012ac9e18256da2434ecdcf48894a12178c369b643c9b30f25771ee1c4fba92"}
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.938978 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"14ce638b-1621-451a-80b6-0e13b6ffb734","Type":"ContainerDied","Data":"07f994f9e1c2fa25bd24a1b436f837eb006877e11013a589a34bf028d013c942"}
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.944489 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-zcbz9"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.953005 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glancec0c5-account-delete-fx6st"
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.984602 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"]
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.984888 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="322fb449-5599-45af-97e2-158692366d9b" containerName="ovn-northd" containerID="cri-o://eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274" gracePeriod=30
Nov 30 07:08:57 crc kubenswrapper[4941]: I1130 07:08:57.985286 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="322fb449-5599-45af-97e2-158692366d9b" containerName="openstack-network-exporter" containerID="cri-o://580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.009829 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-hbkr7"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.040422 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-hbkr7"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.082399 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-wtqm6"]
Nov 30 07:08:58 crc kubenswrapper[4941]: E1130 07:08:58.102554 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.104390 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-wtqm6"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.153502 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapi2ee8-account-delete-jr5gz"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.155179 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi2ee8-account-delete-jr5gz"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.195387 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi2ee8-account-delete-jr5gz"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.236359 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts\") pod \"novaapi2ee8-account-delete-jr5gz\" (UID: \"4c62ddf4-ab03-4aa9-968b-ed0a8898d367\") " pod="openstack/novaapi2ee8-account-delete-jr5gz"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.236469 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsb9w\" (UniqueName: \"kubernetes.io/projected/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-kube-api-access-wsb9w\") pod \"novaapi2ee8-account-delete-jr5gz\" (UID: \"4c62ddf4-ab03-4aa9-968b-ed0a8898d367\") " pod="openstack/novaapi2ee8-account-delete-jr5gz"
Nov 30 07:08:58 crc kubenswrapper[4941]: E1130 07:08:58.236624 4941 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Nov 30 07:08:58 crc kubenswrapper[4941]: E1130 07:08:58.236666 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data podName:4e7a5ee5-1f0c-4819-a375-891a5e2cea03 nodeName:}" failed. No retries permitted until 2025-11-30 07:08:59.236651301 +0000 UTC m=+1360.004822910 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data") pod "rabbitmq-server-0" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03") : configmap "rabbitmq-config-data" not found
Nov 30 07:08:58 crc kubenswrapper[4941]: E1130 07:08:58.244857 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.273574 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.273814 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" podUID="674f83a3-0419-43d7-a679-fed1bf09b047" containerName="dnsmasq-dns" containerID="cri-o://1de6943804783b4a852eb14e98f7041622c4c2594236d04ba6c18ed97142b408" gracePeriod=10
Nov 30 07:08:58 crc kubenswrapper[4941]: E1130 07:08:58.295615 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Nov 30 07:08:58 crc kubenswrapper[4941]: E1130 07:08:58.295681 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="322fb449-5599-45af-97e2-158692366d9b" containerName="ovn-northd"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.297492 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-rskk6"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.340711 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts\") pod \"novaapi2ee8-account-delete-jr5gz\" (UID: \"4c62ddf4-ab03-4aa9-968b-ed0a8898d367\") " pod="openstack/novaapi2ee8-account-delete-jr5gz"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.341118 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsb9w\" (UniqueName: \"kubernetes.io/projected/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-kube-api-access-wsb9w\") pod \"novaapi2ee8-account-delete-jr5gz\" (UID: \"4c62ddf4-ab03-4aa9-968b-ed0a8898d367\") " pod="openstack/novaapi2ee8-account-delete-jr5gz"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.342628 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts\") pod \"novaapi2ee8-account-delete-jr5gz\" (UID: \"4c62ddf4-ab03-4aa9-968b-ed0a8898d367\") " pod="openstack/novaapi2ee8-account-delete-jr5gz"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.390397 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.390945 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" containerName="cinder-scheduler" containerID="cri-o://a96ce6ca3db082f07bc30a85b2dd3ed276f669d63c74e2397c8b08ebfa4f983e" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.391382 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" containerName="probe" containerID="cri-o://901debcd5ec58b9fd628099c8ca6fab3d5432f5cf3c027e27b344f81cf8d7260" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.464463 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell00ef1-account-delete-jbdnp"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.465723 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell00ef1-account-delete-jbdnp"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.559906 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-rskk6"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.571700 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsb9w\" (UniqueName: \"kubernetes.io/projected/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-kube-api-access-wsb9w\") pod \"novaapi2ee8-account-delete-jr5gz\" (UID: \"4c62ddf4-ab03-4aa9-968b-ed0a8898d367\") " pod="openstack/novaapi2ee8-account-delete-jr5gz"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.574413 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6br9\" (UniqueName: \"kubernetes.io/projected/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7-kube-api-access-f6br9\") pod \"novacell00ef1-account-delete-jbdnp\" (UID: \"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7\") " pod="openstack/novacell00ef1-account-delete-jbdnp"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.574519 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7-operator-scripts\") pod \"novacell00ef1-account-delete-jbdnp\" (UID: \"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7\") " pod="openstack/novacell00ef1-account-delete-jbdnp"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.605290 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell00ef1-account-delete-jbdnp"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.640618 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.640843 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1822cd0b-b52d-49d7-b787-a1091edfc585" containerName="cinder-api-log" containerID="cri-o://305c3aba8af09bc4d0a3d63208f7e2949135c2ee18761aac46287cf1068feab1" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.641218 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1822cd0b-b52d-49d7-b787-a1091edfc585" containerName="cinder-api" containerID="cri-o://96e02e36152e3e77bc3bcb37965f0c42f78aaeec0dbbaa72be4ad8497a927704" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.664773 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665268 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-server" containerID="cri-o://878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665634 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="swift-recon-cron" containerID="cri-o://70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665681 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="rsync" containerID="cri-o://6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665711 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-expirer" containerID="cri-o://e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665754 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-updater" containerID="cri-o://7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665781 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-auditor" containerID="cri-o://5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665811 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-replicator" containerID="cri-o://2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665840 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-server" containerID="cri-o://23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665868 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-updater" containerID="cri-o://b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665897 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-auditor" containerID="cri-o://0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665928 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-replicator" containerID="cri-o://214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665958 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-server" containerID="cri-o://7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.665988 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-reaper" containerID="cri-o://dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.666019 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-auditor" containerID="cri-o://30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.666047 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-replicator" containerID="cri-o://a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.687145 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6br9\" (UniqueName: \"kubernetes.io/projected/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7-kube-api-access-f6br9\") pod \"novacell00ef1-account-delete-jbdnp\" (UID: \"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7\") " pod="openstack/novacell00ef1-account-delete-jbdnp"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.687209 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7-operator-scripts\") pod \"novacell00ef1-account-delete-jbdnp\" (UID: \"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7\") " pod="openstack/novacell00ef1-account-delete-jbdnp"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.688127 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7-operator-scripts\") pod \"novacell00ef1-account-delete-jbdnp\" (UID: \"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7\") " pod="openstack/novacell00ef1-account-delete-jbdnp"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.708371 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-7c9965466b-7rmfq"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.708676 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-7c9965466b-7rmfq" podUID="7a758cc8-4546-4982-b2a7-b7824ecfc118" containerName="placement-log" containerID="cri-o://539d89a15edc70dc4c19cc2280d7df494d363b3461a1f1f53b5b2c3c6f64de13" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.709191 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-7c9965466b-7rmfq" podUID="7a758cc8-4546-4982-b2a7-b7824ecfc118" containerName="placement-api" containerID="cri-o://009377a60fa215ae8250c38bc50dca1c5bfb321e79ea6ee7dd44dd9fbdcf7b42" gracePeriod=30
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.737037 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-hww95"]
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.741135 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6br9\" (UniqueName: \"kubernetes.io/projected/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7-kube-api-access-f6br9\") pod \"novacell00ef1-account-delete-jbdnp\" (UID: \"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7\") " pod="openstack/novacell00ef1-account-delete-jbdnp"
Nov 30 07:08:58 crc kubenswrapper[4941]: I1130
07:08:58.759856 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-6hd2t"] Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.768212 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-hww95"] Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.796581 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-6hd2t"] Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.829520 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5678756fc7-642xv"] Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.830635 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5678756fc7-642xv" podUID="9dfbf8e6-60f7-47a0-9fee-3d532daf0503" containerName="neutron-httpd" containerID="cri-o://539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575" gracePeriod=30 Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.830672 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5678756fc7-642xv" podUID="9dfbf8e6-60f7-47a0-9fee-3d532daf0503" containerName="neutron-api" containerID="cri-o://2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc" gracePeriod=30 Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.837718 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi2ee8-account-delete-jr5gz" Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.840576 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.840835 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a376fae9-3d2f-4247-b917-0d63e6f4a9da" containerName="glance-log" containerID="cri-o://68e80ed464e919522f447820599362734c316c8d7a5459adff802d5f94a52e5f" gracePeriod=30 Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.841006 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a376fae9-3d2f-4247-b917-0d63e6f4a9da" containerName="glance-httpd" containerID="cri-o://dfb235dd19464a6b133288b3327f7e345c63de23737ba8b6a478ad9216727e2b" gracePeriod=30 Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.848157 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder43da-account-delete-8nqxg"] Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.865609 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell00ef1-account-delete-jbdnp" Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.880689 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-7dc97f7589-7ghf8"] Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.880995 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-7dc97f7589-7ghf8" podUID="08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" containerName="proxy-httpd" containerID="cri-o://1b70a2a767c9e03c6d699f586af8bd64fc30e67891d157e6de0aa7801367dc48" gracePeriod=30 Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.881556 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-7dc97f7589-7ghf8" podUID="08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" containerName="proxy-server" containerID="cri-o://f817ede79334101b289bba0c90e92e4faabbe95f8192168498ec3ad0a4f103ef" gracePeriod=30 Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.893462 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.893734 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="a573f7e0-ee6d-4847-a778-5f6ef41fd17f" containerName="glance-log" containerID="cri-o://9d4cad614888876b728ec4d308e74ba608a61af7f849e758a89e8b1fc6414299" gracePeriod=30 Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.894200 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="a573f7e0-ee6d-4847-a778-5f6ef41fd17f" containerName="glance-httpd" containerID="cri-o://6a679ded02687521bbab8ba0d141ec44027e549ed85d08f01b4106e0cd6db8b8" gracePeriod=30 Nov 30 07:08:58 crc kubenswrapper[4941]: E1130 07:08:58.897687 4941 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 30 07:08:58 crc kubenswrapper[4941]: E1130 07:08:58.897756 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data podName:45978317-0f07-44da-8b74-fbaaec0e6105 nodeName:}" failed. No retries permitted until 2025-11-30 07:09:00.897738733 +0000 UTC m=+1361.665910342 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data") pod "rabbitmq-cell1-server-0" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105") : configmap "rabbitmq-cell1-config-data" not found Nov 30 07:08:58 crc kubenswrapper[4941]: I1130 07:08:58.911851 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:58.950642 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-7987c5dbd6-p8ncc"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:58.950874 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" podUID="126a91a7-8a81-40ef-87db-383ed37a26f4" containerName="barbican-keystone-listener-log" containerID="cri-o://ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:58.951304 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" podUID="126a91a7-8a81-40ef-87db-383ed37a26f4" containerName="barbican-keystone-listener" containerID="cri-o://83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:58.969396 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:58.969695 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" containerName="nova-metadata-log" containerID="cri-o://80eaa1c0bcf74c714669821c2f8c3b37c333106eca38a0ef24d09c0faa3ec4c0" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:58.970246 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" containerName="nova-metadata-metadata" containerID="cri-o://fd044d0caf33128dc424d871cee00e10115231eb3535e2249f4d39b8c3c04af1" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.009658 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.045865 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-74d6754465-nglc5"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046118 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-74d6754465-nglc5" podUID="fb74a593-764a-416b-897b-539bafb29c70" containerName="barbican-worker-log" containerID="cri-o://73cb23f3d6e038846d4f5b8f18403308641c3a8d002d0c1fd63ffe41c380b3ef" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046235 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-74d6754465-nglc5" podUID="fb74a593-764a-416b-897b-539bafb29c70" containerName="barbican-worker" containerID="cri-o://012634e55142fed4221ecd68adf1f8141133456f2d0eb8c2e3aca13cafe2681c" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046570 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" 
containerID="e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34" exitCode=0 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046599 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed" exitCode=0 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046605 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc" exitCode=0 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046612 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc" exitCode=0 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046619 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2" exitCode=0 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046627 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897" exitCode=0 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046633 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d" exitCode=0 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046639 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893" exitCode=0 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046702 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046730 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046743 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046753 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046762 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046770 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046778 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.046787 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.053387 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.053639 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" containerName="nova-api-log" containerID="cri-o://a9753f5333e8cf6a6a6e8a5b6bb203dcea93cf7b8757984a96698adeffa9d226" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.053999 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" containerName="nova-api-api" containerID="cri-o://b77426d5a27eb3609eea859e72da0ab57c475fc3ec836c9ba0a47eb046b015a1" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.060473 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-4qtbv"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.073507 4941 generic.go:334] "Generic (PLEG): container finished" podID="1822cd0b-b52d-49d7-b787-a1091edfc585" containerID="305c3aba8af09bc4d0a3d63208f7e2949135c2ee18761aac46287cf1068feab1" exitCode=143 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.073606 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-4qtbv"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.073631 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1822cd0b-b52d-49d7-b787-a1091edfc585","Type":"ContainerDied","Data":"305c3aba8af09bc4d0a3d63208f7e2949135c2ee18761aac46287cf1068feab1"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.084678 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6794dcdcd8-t9v24"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.084889 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6794dcdcd8-t9v24" podUID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" containerName="barbican-api-log" containerID="cri-o://aeb000c0386dce47c7a56b3872cfb39b10d750b9a481ebdd2c575d97c0bbecbd" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.085416 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6794dcdcd8-t9v24" podUID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" containerName="barbican-api" containerID="cri-o://26ada5d7acc3a461a8a330fae6ff00d6ad1801a8caa3a433600d9307e3c1a50d" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.087861 4941 generic.go:334] "Generic (PLEG): container finished" podID="674f83a3-0419-43d7-a679-fed1bf09b047" 
containerID="1de6943804783b4a852eb14e98f7041622c4c2594236d04ba6c18ed97142b408" exitCode=0 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.087940 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" event={"ID":"674f83a3-0419-43d7-a679-fed1bf09b047","Type":"ContainerDied","Data":"1de6943804783b4a852eb14e98f7041622c4c2594236d04ba6c18ed97142b408"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.093892 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.112843 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-rwxvw_94cbff61-3614-4efd-b4ba-36bef65f2ae7/openstack-network-exporter/0.log" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.112954 4941 generic.go:334] "Generic (PLEG): container finished" podID="94cbff61-3614-4efd-b4ba-36bef65f2ae7" containerID="c0d359c73119f6c2eb67ed8df045fe124ba61df2f455244a61a488956e3a2e34" exitCode=2 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.113106 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-rwxvw" event={"ID":"94cbff61-3614-4efd-b4ba-36bef65f2ae7","Type":"ContainerDied","Data":"c0d359c73119f6c2eb67ed8df045fe124ba61df2f455244a61a488956e3a2e34"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.117735 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-77d0-account-create-update-25gtl"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.131624 4941 generic.go:334] "Generic (PLEG): container finished" podID="7a758cc8-4546-4982-b2a7-b7824ecfc118" containerID="539d89a15edc70dc4c19cc2280d7df494d363b3461a1f1f53b5b2c3c6f64de13" exitCode=143 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.131727 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7c9965466b-7rmfq" event={"ID":"7a758cc8-4546-4982-b2a7-b7824ecfc118","Type":"ContainerDied","Data":"539d89a15edc70dc4c19cc2280d7df494d363b3461a1f1f53b5b2c3c6f64de13"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.138151 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-77d0-account-create-update-25gtl"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.145363 4941 generic.go:334] "Generic (PLEG): container finished" podID="322fb449-5599-45af-97e2-158692366d9b" containerID="580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283" exitCode=2 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.145447 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"322fb449-5599-45af-97e2-158692366d9b","Type":"ContainerDied","Data":"580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.156429 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.156782 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="f311bacd-2cef-44fe-95c4-38a7462cd4a6" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://199b3e26fc0f6545467dc0bfc386bbb742110c36e2dd4105695b124948c0d840" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.173741 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:08:59 crc 
kubenswrapper[4941]: I1130 07:08:59.173983 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="14ac7adf-c7e3-4512-9c65-6361d005b4b7" containerName="nova-scheduler-scheduler" containerID="cri-o://370789e569c2ad6cc7a3fb517040dd5ea003437e5aef35484b9575972bc086d9" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.196948 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_ea6e32f6-a5d9-4b23-9588-2ea6be572e72/ovsdbserver-nb/0.log" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.196986 4941 generic.go:334] "Generic (PLEG): container finished" podID="ea6e32f6-a5d9-4b23-9588-2ea6be572e72" containerID="eba3ae86f525c89da8b2e9bcc6b9bd66ffd75c1417b2bfa7df563071df5d0c63" exitCode=143 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.197079 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ea6e32f6-a5d9-4b23-9588-2ea6be572e72","Type":"ContainerDied","Data":"eba3ae86f525c89da8b2e9bcc6b9bd66ffd75c1417b2bfa7df563071df5d0c63"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.208004 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder43da-account-delete-8nqxg" event={"ID":"c4c68094-a8b8-4327-9ae1-335226d3b938","Type":"ContainerStarted","Data":"f2509a49bab6ccbd75baa4546123fd004bc0541d9ef823d46c73276c9fc37974"} Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.298685 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jn7qf"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.317379 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.317601 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce" containerName="nova-cell1-conductor-conductor" containerID="cri-o://54724f9e7b9cb397a6dbf2bf4b7b305271ebdeef511fa6d99df016ffce4d170d" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: E1130 07:08:59.319827 4941 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 30 07:08:59 crc kubenswrapper[4941]: E1130 07:08:59.319870 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data podName:4e7a5ee5-1f0c-4819-a375-891a5e2cea03 nodeName:}" failed. No retries permitted until 2025-11-30 07:09:01.319857302 +0000 UTC m=+1362.088028911 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data") pod "rabbitmq-server-0" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03") : configmap "rabbitmq-config-data" not found Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.335574 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-jn7qf"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.362878 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-r9r94"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.364147 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="4e7a5ee5-1f0c-4819-a375-891a5e2cea03" containerName="rabbitmq" containerID="cri-o://9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f" gracePeriod=604800 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.376095 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.376281 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="88971618-54e2-4670-be08-a6ae63ed99df" containerName="nova-cell0-conductor-conductor" containerID="cri-o://ef324eb6f3967c89fcde7a1338bf072fab857f22a3e354ed3b1cd701d98d5c93" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.377547 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="45978317-0f07-44da-8b74-fbaaec0e6105" containerName="rabbitmq" containerID="cri-o://0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08" gracePeriod=604800 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.389109 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-r9r94"] Nov 30 07:08:59 crc kubenswrapper[4941]: E1130 07:08:59.486270 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ef324eb6f3967c89fcde7a1338bf072fab857f22a3e354ed3b1cd701d98d5c93" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 30 07:08:59 crc kubenswrapper[4941]: E1130 07:08:59.495625 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ef324eb6f3967c89fcde7a1338bf072fab857f22a3e354ed3b1cd701d98d5c93" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 30 07:08:59 crc kubenswrapper[4941]: E1130 07:08:59.544376 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ef324eb6f3967c89fcde7a1338bf072fab857f22a3e354ed3b1cd701d98d5c93" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 30 07:08:59 crc kubenswrapper[4941]: E1130 07:08:59.544762 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="88971618-54e2-4670-be08-a6ae63ed99df" 
containerName="nova-cell0-conductor-conductor" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.620532 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02e23704-d56c-4d7b-81f9-729b38a5c39e" path="/var/lib/kubelet/pods/02e23704-d56c-4d7b-81f9-729b38a5c39e/volumes" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.621583 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="130403d6-7916-4482-8ced-0a334b1ca222" path="/var/lib/kubelet/pods/130403d6-7916-4482-8ced-0a334b1ca222/volumes" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.622426 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c1461b3-8d1d-4812-a241-ad0a1a962c35" path="/var/lib/kubelet/pods/2c1461b3-8d1d-4812-a241-ad0a1a962c35/volumes" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.636134 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="414c3399-b5e5-4e28-a1ab-d646fa3193fe" path="/var/lib/kubelet/pods/414c3399-b5e5-4e28-a1ab-d646fa3193fe/volumes" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.637342 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1" path="/var/lib/kubelet/pods/6e2ae7f7-ed16-40e2-a5ee-638d5f95f0c1/volumes" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.638273 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f615e12-dbe4-4037-ae70-3fe72dade25a" path="/var/lib/kubelet/pods/6f615e12-dbe4-4037-ae70-3fe72dade25a/volumes" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.639570 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="940ccc92-8cc8-4e78-b862-a7f5fa3d9288" path="/var/lib/kubelet/pods/940ccc92-8cc8-4e78-b862-a7f5fa3d9288/volumes" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.640215 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3f11ab2-1455-484c-8d3d-a09bf34a6f72" path="/var/lib/kubelet/pods/a3f11ab2-1455-484c-8d3d-a09bf34a6f72/volumes" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.640833 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb8e782b-25d0-452c-a19a-7e43765f4200" path="/var/lib/kubelet/pods/bb8e782b-25d0-452c-a19a-7e43765f4200/volumes" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.642177 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4e9c60f-d375-4332-ab45-93a3e96be457" path="/var/lib/kubelet/pods/d4e9c60f-d375-4332-ab45-93a3e96be457/volumes" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.642716 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddbc8aeb-8359-4427-843f-c6e2377e2857" path="/var/lib/kubelet/pods/ddbc8aeb-8359-4427-843f-c6e2377e2857/volumes" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.685498 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovs-vswitchd" containerID="cri-o://ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" gracePeriod=29 Nov 30 07:08:59 crc kubenswrapper[4941]: E1130 07:08:59.754459 4941 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Nov 30 07:08:59 crc kubenswrapper[4941]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Nov 30 07:08:59 crc kubenswrapper[4941]: + source 
/usr/local/bin/container-scripts/functions Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNBridge=br-int Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNRemote=tcp:localhost:6642 Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNEncapType=geneve Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNAvailabilityZones= Nov 30 07:08:59 crc kubenswrapper[4941]: ++ EnableChassisAsGateway=true Nov 30 07:08:59 crc kubenswrapper[4941]: ++ PhysicalNetworks= Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNHostName= Nov 30 07:08:59 crc kubenswrapper[4941]: ++ DB_FILE=/etc/openvswitch/conf.db Nov 30 07:08:59 crc kubenswrapper[4941]: ++ ovs_dir=/var/lib/openvswitch Nov 30 07:08:59 crc kubenswrapper[4941]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Nov 30 07:08:59 crc kubenswrapper[4941]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Nov 30 07:08:59 crc kubenswrapper[4941]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 30 07:08:59 crc kubenswrapper[4941]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 30 07:08:59 crc kubenswrapper[4941]: + sleep 0.5 Nov 30 07:08:59 crc kubenswrapper[4941]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 30 07:08:59 crc kubenswrapper[4941]: + sleep 0.5 Nov 30 07:08:59 crc kubenswrapper[4941]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 30 07:08:59 crc kubenswrapper[4941]: + sleep 0.5 Nov 30 07:08:59 crc kubenswrapper[4941]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 30 07:08:59 crc kubenswrapper[4941]: + cleanup_ovsdb_server_semaphore Nov 30 07:08:59 crc kubenswrapper[4941]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 30 07:08:59 crc kubenswrapper[4941]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Nov 30 07:08:59 crc kubenswrapper[4941]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-slfpx" message=< Nov 30 07:08:59 crc kubenswrapper[4941]: Exiting ovsdb-server (5) [ OK ] Nov 30 07:08:59 crc kubenswrapper[4941]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Nov 30 07:08:59 crc kubenswrapper[4941]: + source /usr/local/bin/container-scripts/functions Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNBridge=br-int Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNRemote=tcp:localhost:6642 Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNEncapType=geneve Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNAvailabilityZones= Nov 30 07:08:59 crc kubenswrapper[4941]: ++ EnableChassisAsGateway=true Nov 30 07:08:59 crc kubenswrapper[4941]: ++ PhysicalNetworks= Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNHostName= Nov 30 07:08:59 crc kubenswrapper[4941]: ++ DB_FILE=/etc/openvswitch/conf.db Nov 30 07:08:59 crc kubenswrapper[4941]: ++ ovs_dir=/var/lib/openvswitch Nov 30 07:08:59 crc kubenswrapper[4941]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Nov 30 07:08:59 crc kubenswrapper[4941]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Nov 30 07:08:59 crc kubenswrapper[4941]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 30 07:08:59 crc kubenswrapper[4941]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 30 07:08:59 crc kubenswrapper[4941]: + sleep 0.5 Nov 30 07:08:59 crc kubenswrapper[4941]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 30 07:08:59 crc kubenswrapper[4941]: + sleep 0.5 Nov 30 07:08:59 crc kubenswrapper[4941]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 30 07:08:59 crc kubenswrapper[4941]: + sleep 0.5 Nov 30 07:08:59 crc kubenswrapper[4941]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 30 07:08:59 crc kubenswrapper[4941]: + cleanup_ovsdb_server_semaphore Nov 30 07:08:59 crc kubenswrapper[4941]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 30 07:08:59 crc kubenswrapper[4941]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Nov 30 07:08:59 crc kubenswrapper[4941]: > Nov 30 07:08:59 crc kubenswrapper[4941]: E1130 07:08:59.754513 4941 kuberuntime_container.go:691] "PreStop hook failed" err=< Nov 30 07:08:59 crc kubenswrapper[4941]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Nov 30 07:08:59 crc kubenswrapper[4941]: + source /usr/local/bin/container-scripts/functions Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNBridge=br-int Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNRemote=tcp:localhost:6642 Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNEncapType=geneve Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNAvailabilityZones= Nov 30 07:08:59 crc kubenswrapper[4941]: ++ EnableChassisAsGateway=true Nov 30 07:08:59 crc kubenswrapper[4941]: ++ PhysicalNetworks= Nov 30 07:08:59 crc kubenswrapper[4941]: ++ OVNHostName= Nov 30 07:08:59 crc kubenswrapper[4941]: ++ DB_FILE=/etc/openvswitch/conf.db Nov 30 07:08:59 crc kubenswrapper[4941]: ++ ovs_dir=/var/lib/openvswitch Nov 30 07:08:59 crc kubenswrapper[4941]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Nov 30 07:08:59 crc kubenswrapper[4941]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Nov 30 07:08:59 crc kubenswrapper[4941]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 30 07:08:59 crc kubenswrapper[4941]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 30 07:08:59 crc kubenswrapper[4941]: + sleep 0.5 Nov 30 07:08:59 crc kubenswrapper[4941]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 30 07:08:59 crc kubenswrapper[4941]: + sleep 0.5 Nov 30 07:08:59 crc kubenswrapper[4941]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 30 07:08:59 crc kubenswrapper[4941]: + sleep 0.5 Nov 30 07:08:59 crc kubenswrapper[4941]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 30 07:08:59 crc kubenswrapper[4941]: + cleanup_ovsdb_server_semaphore Nov 30 07:08:59 crc kubenswrapper[4941]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 30 07:08:59 crc kubenswrapper[4941]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Nov 30 07:08:59 crc kubenswrapper[4941]: > pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server" containerID="cri-o://de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.754554 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server" containerID="cri-o://de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" gracePeriod=28 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.811377 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="fa90de41-9166-475c-925a-3d79b02a694d" containerName="galera" containerID="cri-o://5c6f3a2a19c5ce5cd94327bc758bf13a86b7ae51c23f1c360ae9657455e0503b" gracePeriod=30 Nov 30 07:08:59 crc kubenswrapper[4941]: I1130 07:08:59.827109 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican75ae-account-delete-7krwx"] Nov 30 07:08:59 crc kubenswrapper[4941]: E1130 07:08:59.932045 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07f994f9e1c2fa25bd24a1b436f837eb006877e11013a589a34bf028d013c942 is running failed: container process not found" containerID="07f994f9e1c2fa25bd24a1b436f837eb006877e11013a589a34bf028d013c942" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 30 07:08:59 crc kubenswrapper[4941]: E1130 07:08:59.934697 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07f994f9e1c2fa25bd24a1b436f837eb006877e11013a589a34bf028d013c942 is running failed: container process not found" containerID="07f994f9e1c2fa25bd24a1b436f837eb006877e11013a589a34bf028d013c942" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 30 07:08:59 crc kubenswrapper[4941]: E1130 07:08:59.935611 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07f994f9e1c2fa25bd24a1b436f837eb006877e11013a589a34bf028d013c942 is running failed: container process not found" containerID="07f994f9e1c2fa25bd24a1b436f837eb006877e11013a589a34bf028d013c942" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 30 07:08:59 crc kubenswrapper[4941]: E1130 07:08:59.935638 4941 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 07f994f9e1c2fa25bd24a1b436f837eb006877e11013a589a34bf028d013c942 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-sb-0" podUID="14ce638b-1621-451a-80b6-0e13b6ffb734" containerName="ovsdbserver-sb" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.045619 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_14ce638b-1621-451a-80b6-0e13b6ffb734/ovsdbserver-sb/0.log" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.045710 4941 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.065734 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-rwxvw_94cbff61-3614-4efd-b4ba-36bef65f2ae7/openstack-network-exporter/0.log" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.065792 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-rwxvw" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.142286 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutrona2cc-account-delete-262pl"] Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.148604 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.163564 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172342 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94cbff61-3614-4efd-b4ba-36bef65f2ae7-config\") pod \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172387 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-combined-ca-bundle\") pod \"14ce638b-1621-451a-80b6-0e13b6ffb734\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172447 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14ce638b-1621-451a-80b6-0e13b6ffb734-config\") pod \"14ce638b-1621-451a-80b6-0e13b6ffb734\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172479 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5k5\" (UniqueName: \"kubernetes.io/projected/94cbff61-3614-4efd-b4ba-36bef65f2ae7-kube-api-access-vt5k5\") pod \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172536 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-metrics-certs-tls-certs\") pod \"14ce638b-1621-451a-80b6-0e13b6ffb734\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172575 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"14ce638b-1621-451a-80b6-0e13b6ffb734\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172592 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-ovsdbserver-sb-tls-certs\") pod \"14ce638b-1621-451a-80b6-0e13b6ffb734\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " Nov 30 07:09:00 crc kubenswrapper[4941]: 
I1130 07:09:00.172649 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/94cbff61-3614-4efd-b4ba-36bef65f2ae7-metrics-certs-tls-certs\") pod \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172695 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14ce638b-1621-451a-80b6-0e13b6ffb734-scripts\") pod \"14ce638b-1621-451a-80b6-0e13b6ffb734\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172722 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/94cbff61-3614-4efd-b4ba-36bef65f2ae7-ovs-rundir\") pod \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172801 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxgcq\" (UniqueName: \"kubernetes.io/projected/14ce638b-1621-451a-80b6-0e13b6ffb734-kube-api-access-wxgcq\") pod \"14ce638b-1621-451a-80b6-0e13b6ffb734\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172818 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94cbff61-3614-4efd-b4ba-36bef65f2ae7-combined-ca-bundle\") pod \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172841 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/94cbff61-3614-4efd-b4ba-36bef65f2ae7-ovn-rundir\") pod \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\" (UID: \"94cbff61-3614-4efd-b4ba-36bef65f2ae7\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.172858 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/14ce638b-1621-451a-80b6-0e13b6ffb734-ovsdb-rundir\") pod \"14ce638b-1621-451a-80b6-0e13b6ffb734\" (UID: \"14ce638b-1621-451a-80b6-0e13b6ffb734\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.173069 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94cbff61-3614-4efd-b4ba-36bef65f2ae7-config" (OuterVolumeSpecName: "config") pod "94cbff61-3614-4efd-b4ba-36bef65f2ae7" (UID: "94cbff61-3614-4efd-b4ba-36bef65f2ae7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.173086 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14ce638b-1621-451a-80b6-0e13b6ffb734-config" (OuterVolumeSpecName: "config") pod "14ce638b-1621-451a-80b6-0e13b6ffb734" (UID: "14ce638b-1621-451a-80b6-0e13b6ffb734"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.179707 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94cbff61-3614-4efd-b4ba-36bef65f2ae7-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "94cbff61-3614-4efd-b4ba-36bef65f2ae7" (UID: "94cbff61-3614-4efd-b4ba-36bef65f2ae7"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.179983 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94cbff61-3614-4efd-b4ba-36bef65f2ae7-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "94cbff61-3614-4efd-b4ba-36bef65f2ae7" (UID: "94cbff61-3614-4efd-b4ba-36bef65f2ae7"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.180048 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14ce638b-1621-451a-80b6-0e13b6ffb734-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "14ce638b-1621-451a-80b6-0e13b6ffb734" (UID: "14ce638b-1621-451a-80b6-0e13b6ffb734"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.180282 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14ce638b-1621-451a-80b6-0e13b6ffb734-scripts" (OuterVolumeSpecName: "scripts") pod "14ce638b-1621-451a-80b6-0e13b6ffb734" (UID: "14ce638b-1621-451a-80b6-0e13b6ffb734"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.180612 4941 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/94cbff61-3614-4efd-b4ba-36bef65f2ae7-ovs-rundir\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.180632 4941 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/94cbff61-3614-4efd-b4ba-36bef65f2ae7-ovn-rundir\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.180642 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/14ce638b-1621-451a-80b6-0e13b6ffb734-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.180652 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94cbff61-3614-4efd-b4ba-36bef65f2ae7-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.180660 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14ce638b-1621-451a-80b6-0e13b6ffb734-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.180668 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/14ce638b-1621-451a-80b6-0e13b6ffb734-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.183221 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") 
pod "14ce638b-1621-451a-80b6-0e13b6ffb734" (UID: "14ce638b-1621-451a-80b6-0e13b6ffb734"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.186553 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14ce638b-1621-451a-80b6-0e13b6ffb734-kube-api-access-wxgcq" (OuterVolumeSpecName: "kube-api-access-wxgcq") pod "14ce638b-1621-451a-80b6-0e13b6ffb734" (UID: "14ce638b-1621-451a-80b6-0e13b6ffb734"). InnerVolumeSpecName "kube-api-access-wxgcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.205058 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94cbff61-3614-4efd-b4ba-36bef65f2ae7-kube-api-access-vt5k5" (OuterVolumeSpecName: "kube-api-access-vt5k5") pod "94cbff61-3614-4efd-b4ba-36bef65f2ae7" (UID: "94cbff61-3614-4efd-b4ba-36bef65f2ae7"). InnerVolumeSpecName "kube-api-access-vt5k5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: W1130 07:09:00.205165 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbad7c3f7_8cec_4baf_808a_43184771d1da.slice/crio-2a3f44b675ce7e85ac120a43b22ce7da214a53cfc5f79b6a27bd94f7110107e6 WatchSource:0}: Error finding container 2a3f44b675ce7e85ac120a43b22ce7da214a53cfc5f79b6a27bd94f7110107e6: Status 404 returned error can't find the container with id 2a3f44b675ce7e85ac120a43b22ce7da214a53cfc5f79b6a27bd94f7110107e6 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.206642 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_ea6e32f6-a5d9-4b23-9588-2ea6be572e72/ovsdbserver-nb/0.log" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.206821 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.244188 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glancec0c5-account-delete-fx6st"] Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.249558 4941 generic.go:334] "Generic (PLEG): container finished" podID="126a91a7-8a81-40ef-87db-383ed37a26f4" containerID="ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686" exitCode=143 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.249657 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" event={"ID":"126a91a7-8a81-40ef-87db-383ed37a26f4","Type":"ContainerDied","Data":"ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.251695 4941 generic.go:334] "Generic (PLEG): container finished" podID="a376fae9-3d2f-4247-b917-0d63e6f4a9da" containerID="68e80ed464e919522f447820599362734c316c8d7a5459adff802d5f94a52e5f" exitCode=143 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.251767 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a376fae9-3d2f-4247-b917-0d63e6f4a9da","Type":"ContainerDied","Data":"68e80ed464e919522f447820599362734c316c8d7a5459adff802d5f94a52e5f"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.252749 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican75ae-account-delete-7krwx" event={"ID":"1bb6c559-5c94-43b0-b6f0-3992652c720f","Type":"ContainerStarted","Data":"8ce79d44fde9c147f0f0daea7b0424a91b6cad70acd08cc8b9d37fddb9bd4ce7"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.262667 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_ea6e32f6-a5d9-4b23-9588-2ea6be572e72/ovsdbserver-nb/0.log" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.262766 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.262781 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"ea6e32f6-a5d9-4b23-9588-2ea6be572e72","Type":"ContainerDied","Data":"033ce2886ced490e6be076782e2970fa4ea1eca22b8faae5840b93e9311e948b"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.262816 4941 scope.go:117] "RemoveContainer" containerID="e2122d103d04cc614de8738002d8a386052f20ec6347c472adc19bc507389c00" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.278222 4941 generic.go:334] "Generic (PLEG): container finished" podID="74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" containerID="901debcd5ec58b9fd628099c8ca6fab3d5432f5cf3c027e27b344f81cf8d7260" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.278268 4941 generic.go:334] "Generic (PLEG): container finished" podID="74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" containerID="a96ce6ca3db082f07bc30a85b2dd3ed276f669d63c74e2397c8b08ebfa4f983e" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.278341 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a","Type":"ContainerDied","Data":"901debcd5ec58b9fd628099c8ca6fab3d5432f5cf3c027e27b344f81cf8d7260"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.278366 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a","Type":"ContainerDied","Data":"a96ce6ca3db082f07bc30a85b2dd3ed276f669d63c74e2397c8b08ebfa4f983e"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281203 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-config\") pod \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281240 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-dns-swift-storage-0\") pod \"674f83a3-0419-43d7-a679-fed1bf09b047\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281259 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-ovsdbserver-nb-tls-certs\") pod \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281278 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-config\") pod \"674f83a3-0419-43d7-a679-fed1bf09b047\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281346 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281366 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5679f4ed-6882-4f85-93b2-02ccff357b48-combined-ca-bundle\") pod \"5679f4ed-6882-4f85-93b2-02ccff357b48\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281405 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-ovsdbserver-nb\") pod \"674f83a3-0419-43d7-a679-fed1bf09b047\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281440 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5679f4ed-6882-4f85-93b2-02ccff357b48-openstack-config-secret\") pod \"5679f4ed-6882-4f85-93b2-02ccff357b48\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281462 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-metrics-certs-tls-certs\") pod \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281481 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjc2d\" (UniqueName: \"kubernetes.io/projected/5679f4ed-6882-4f85-93b2-02ccff357b48-kube-api-access-hjc2d\") pod \"5679f4ed-6882-4f85-93b2-02ccff357b48\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281504 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-scripts\") pod \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281545 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-ovsdb-rundir\") pod \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281588 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-combined-ca-bundle\") pod \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281630 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5679f4ed-6882-4f85-93b2-02ccff357b48-openstack-config\") pod \"5679f4ed-6882-4f85-93b2-02ccff357b48\" (UID: \"5679f4ed-6882-4f85-93b2-02ccff357b48\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281648 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-ovsdbserver-sb\") pod \"674f83a3-0419-43d7-a679-fed1bf09b047\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281668 4941 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-dns-svc\") pod \"674f83a3-0419-43d7-a679-fed1bf09b047\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281804 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvcb8\" (UniqueName: \"kubernetes.io/projected/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-kube-api-access-mvcb8\") pod \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\" (UID: \"ea6e32f6-a5d9-4b23-9588-2ea6be572e72\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.281845 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gpc2\" (UniqueName: \"kubernetes.io/projected/674f83a3-0419-43d7-a679-fed1bf09b047-kube-api-access-8gpc2\") pod \"674f83a3-0419-43d7-a679-fed1bf09b047\" (UID: \"674f83a3-0419-43d7-a679-fed1bf09b047\") " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.282123 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxgcq\" (UniqueName: \"kubernetes.io/projected/14ce638b-1621-451a-80b6-0e13b6ffb734-kube-api-access-wxgcq\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.282144 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5k5\" (UniqueName: \"kubernetes.io/projected/94cbff61-3614-4efd-b4ba-36bef65f2ae7-kube-api-access-vt5k5\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.282162 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.285318 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14ce638b-1621-451a-80b6-0e13b6ffb734" (UID: "14ce638b-1621-451a-80b6-0e13b6ffb734"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.285807 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "ea6e32f6-a5d9-4b23-9588-2ea6be572e72" (UID: "ea6e32f6-a5d9-4b23-9588-2ea6be572e72"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.286011 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement6f8e-account-delete-sd7xt"] Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.291165 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-scripts" (OuterVolumeSpecName: "scripts") pod "ea6e32f6-a5d9-4b23-9588-2ea6be572e72" (UID: "ea6e32f6-a5d9-4b23-9588-2ea6be572e72"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.292893 4941 generic.go:334] "Generic (PLEG): container finished" podID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.292978 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-slfpx" event={"ID":"af96ea99-9953-4e58-8ecc-0999730fcaf9","Type":"ContainerDied","Data":"de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.293555 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "ea6e32f6-a5d9-4b23-9588-2ea6be572e72" (UID: "ea6e32f6-a5d9-4b23-9588-2ea6be572e72"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.295527 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-config" (OuterVolumeSpecName: "config") pod "ea6e32f6-a5d9-4b23-9588-2ea6be572e72" (UID: "ea6e32f6-a5d9-4b23-9588-2ea6be572e72"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.297100 4941 generic.go:334] "Generic (PLEG): container finished" podID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" containerID="aeb000c0386dce47c7a56b3872cfb39b10d750b9a481ebdd2c575d97c0bbecbd" exitCode=143 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.297182 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6794dcdcd8-t9v24" event={"ID":"cf0e4aae-888b-4df8-a6e2-19a5f04b9656","Type":"ContainerDied","Data":"aeb000c0386dce47c7a56b3872cfb39b10d750b9a481ebdd2c575d97c0bbecbd"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.311060 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.311096 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.311109 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.311119 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.311128 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.311137 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" 
containerID="878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.311171 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.311209 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.311225 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.311239 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.311251 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.311261 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.318367 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-kube-api-access-mvcb8" (OuterVolumeSpecName: "kube-api-access-mvcb8") pod "ea6e32f6-a5d9-4b23-9588-2ea6be572e72" (UID: "ea6e32f6-a5d9-4b23-9588-2ea6be572e72"). InnerVolumeSpecName "kube-api-access-mvcb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.322479 4941 generic.go:334] "Generic (PLEG): container finished" podID="5679f4ed-6882-4f85-93b2-02ccff357b48" containerID="7f998986b8950e234b7bc2a24fd3e535f40df67a3670f9a085fdfa035b9598de" exitCode=137 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.322609 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.324613 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5679f4ed-6882-4f85-93b2-02ccff357b48-kube-api-access-hjc2d" (OuterVolumeSpecName: "kube-api-access-hjc2d") pod "5679f4ed-6882-4f85-93b2-02ccff357b48" (UID: "5679f4ed-6882-4f85-93b2-02ccff357b48"). InnerVolumeSpecName "kube-api-access-hjc2d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.328469 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/674f83a3-0419-43d7-a679-fed1bf09b047-kube-api-access-8gpc2" (OuterVolumeSpecName: "kube-api-access-8gpc2") pod "674f83a3-0419-43d7-a679-fed1bf09b047" (UID: "674f83a3-0419-43d7-a679-fed1bf09b047"). InnerVolumeSpecName "kube-api-access-8gpc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.328783 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutrona2cc-account-delete-262pl" event={"ID":"bad7c3f7-8cec-4baf-808a-43184771d1da","Type":"ContainerStarted","Data":"2a3f44b675ce7e85ac120a43b22ce7da214a53cfc5f79b6a27bd94f7110107e6"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.331434 4941 scope.go:117] "RemoveContainer" containerID="eba3ae86f525c89da8b2e9bcc6b9bd66ffd75c1417b2bfa7df563071df5d0c63" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.331932 4941 generic.go:334] "Generic (PLEG): container finished" podID="3315d9fd-71da-4f22-98d8-7142da896aab" containerID="80eaa1c0bcf74c714669821c2f8c3b37c333106eca38a0ef24d09c0faa3ec4c0" exitCode=143 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.331981 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3315d9fd-71da-4f22-98d8-7142da896aab","Type":"ContainerDied","Data":"80eaa1c0bcf74c714669821c2f8c3b37c333106eca38a0ef24d09c0faa3ec4c0"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.334287 4941 generic.go:334] "Generic (PLEG): container finished" podID="fb74a593-764a-416b-897b-539bafb29c70" containerID="73cb23f3d6e038846d4f5b8f18403308641c3a8d002d0c1fd63ffe41c380b3ef" exitCode=143 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.334360 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-74d6754465-nglc5" event={"ID":"fb74a593-764a-416b-897b-539bafb29c70","Type":"ContainerDied","Data":"73cb23f3d6e038846d4f5b8f18403308641c3a8d002d0c1fd63ffe41c380b3ef"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.342005 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-rwxvw_94cbff61-3614-4efd-b4ba-36bef65f2ae7/openstack-network-exporter/0.log" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.342108 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-rwxvw" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.342448 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-rwxvw" event={"ID":"94cbff61-3614-4efd-b4ba-36bef65f2ae7","Type":"ContainerDied","Data":"36b116bad0212daa30d59ae82586ec7c2c84ebc36044a264d3a5765707f99ccb"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.347805 4941 generic.go:334] "Generic (PLEG): container finished" podID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" containerID="a9753f5333e8cf6a6a6e8a5b6bb203dcea93cf7b8757984a96698adeffa9d226" exitCode=143 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.347908 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc","Type":"ContainerDied","Data":"a9753f5333e8cf6a6a6e8a5b6bb203dcea93cf7b8757984a96698adeffa9d226"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.355653 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder43da-account-delete-8nqxg" podStartSLOduration=4.355634609 podStartE2EDuration="4.355634609s" podCreationTimestamp="2025-11-30 07:08:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:09:00.354856056 +0000 UTC m=+1361.123027665" watchObservedRunningTime="2025-11-30 07:09:00.355634609 +0000 UTC m=+1361.123806218" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.376389 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94cbff61-3614-4efd-b4ba-36bef65f2ae7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94cbff61-3614-4efd-b4ba-36bef65f2ae7" (UID: "94cbff61-3614-4efd-b4ba-36bef65f2ae7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.384915 4941 generic.go:334] "Generic (PLEG): container finished" podID="f311bacd-2cef-44fe-95c4-38a7462cd4a6" containerID="199b3e26fc0f6545467dc0bfc386bbb742110c36e2dd4105695b124948c0d840" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.385039 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f311bacd-2cef-44fe-95c4-38a7462cd4a6","Type":"ContainerDied","Data":"199b3e26fc0f6545467dc0bfc386bbb742110c36e2dd4105695b124948c0d840"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.388025 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.389976 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.390013 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94cbff61-3614-4efd-b4ba-36bef65f2ae7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.390031 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjc2d\" (UniqueName: \"kubernetes.io/projected/5679f4ed-6882-4f85-93b2-02ccff357b48-kube-api-access-hjc2d\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.390043 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.390055 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.390476 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.390493 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvcb8\" (UniqueName: \"kubernetes.io/projected/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-kube-api-access-mvcb8\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.390504 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.390516 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gpc2\" (UniqueName: \"kubernetes.io/projected/674f83a3-0419-43d7-a679-fed1bf09b047-kube-api-access-8gpc2\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.390526 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 
07:09:00.418030 4941 generic.go:334] "Generic (PLEG): container finished" podID="08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" containerID="f817ede79334101b289bba0c90e92e4faabbe95f8192168498ec3ad0a4f103ef" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.418054 4941 generic.go:334] "Generic (PLEG): container finished" podID="08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" containerID="1b70a2a767c9e03c6d699f586af8bd64fc30e67891d157e6de0aa7801367dc48" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.418096 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7dc97f7589-7ghf8" event={"ID":"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b","Type":"ContainerDied","Data":"f817ede79334101b289bba0c90e92e4faabbe95f8192168498ec3ad0a4f103ef"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.418119 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7dc97f7589-7ghf8" event={"ID":"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b","Type":"ContainerDied","Data":"1b70a2a767c9e03c6d699f586af8bd64fc30e67891d157e6de0aa7801367dc48"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.506606 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.506745 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf" event={"ID":"674f83a3-0419-43d7-a679-fed1bf09b047","Type":"ContainerDied","Data":"d68b79741fbd75bc412bce49028cda937cb87623d90255ecb5e476e92709d910"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.539251 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_14ce638b-1621-451a-80b6-0e13b6ffb734/ovsdbserver-sb/0.log" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.539368 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"14ce638b-1621-451a-80b6-0e13b6ffb734","Type":"ContainerDied","Data":"227bbadacaa7e6571399e5d6495a5f4cbfcef7397463c1ea0e5d1d8f57402712"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.539438 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.542585 4941 generic.go:334] "Generic (PLEG): container finished" podID="9dfbf8e6-60f7-47a0-9fee-3d532daf0503" containerID="539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575" exitCode=0 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.542722 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5678756fc7-642xv" event={"ID":"9dfbf8e6-60f7-47a0-9fee-3d532daf0503","Type":"ContainerDied","Data":"539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.545856 4941 generic.go:334] "Generic (PLEG): container finished" podID="a573f7e0-ee6d-4847-a778-5f6ef41fd17f" containerID="9d4cad614888876b728ec4d308e74ba608a61af7f849e758a89e8b1fc6414299" exitCode=143 Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.545886 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a573f7e0-ee6d-4847-a778-5f6ef41fd17f","Type":"ContainerDied","Data":"9d4cad614888876b728ec4d308e74ba608a61af7f849e758a89e8b1fc6414299"} Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.675234 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5679f4ed-6882-4f85-93b2-02ccff357b48-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "5679f4ed-6882-4f85-93b2-02ccff357b48" (UID: "5679f4ed-6882-4f85-93b2-02ccff357b48"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.707830 4941 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/5679f4ed-6882-4f85-93b2-02ccff357b48-openstack-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:00 crc kubenswrapper[4941]: E1130 07:09:00.913054 4941 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 30 07:09:00 crc kubenswrapper[4941]: E1130 07:09:00.913333 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data podName:45978317-0f07-44da-8b74-fbaaec0e6105 nodeName:}" failed. No retries permitted until 2025-11-30 07:09:04.91330669 +0000 UTC m=+1365.681478299 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data") pod "rabbitmq-cell1-server-0" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105") : configmap "rabbitmq-cell1-config-data" not found Nov 30 07:09:00 crc kubenswrapper[4941]: I1130 07:09:00.984011 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.004602 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell00ef1-account-delete-jbdnp"] Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.015542 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.016400 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea6e32f6-a5d9-4b23-9588-2ea6be572e72" (UID: "ea6e32f6-a5d9-4b23-9588-2ea6be572e72"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.044428 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi2ee8-account-delete-jr5gz"] Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.047583 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "14ce638b-1621-451a-80b6-0e13b6ffb734" (UID: "14ce638b-1621-451a-80b6-0e13b6ffb734"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.090869 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "ea6e32f6-a5d9-4b23-9588-2ea6be572e72" (UID: "ea6e32f6-a5d9-4b23-9588-2ea6be572e72"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.098985 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "674f83a3-0419-43d7-a679-fed1bf09b047" (UID: "674f83a3-0419-43d7-a679-fed1bf09b047"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.120222 4941 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.120250 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.120260 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.120272 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.150717 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5679f4ed-6882-4f85-93b2-02ccff357b48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5679f4ed-6882-4f85-93b2-02ccff357b48" (UID: "5679f4ed-6882-4f85-93b2-02ccff357b48"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.153889 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "14ce638b-1621-451a-80b6-0e13b6ffb734" (UID: "14ce638b-1621-451a-80b6-0e13b6ffb734"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.206272 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "674f83a3-0419-43d7-a679-fed1bf09b047" (UID: "674f83a3-0419-43d7-a679-fed1bf09b047"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.217203 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "674f83a3-0419-43d7-a679-fed1bf09b047" (UID: "674f83a3-0419-43d7-a679-fed1bf09b047"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.224231 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5679f4ed-6882-4f85-93b2-02ccff357b48-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.224256 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.224268 4941 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/14ce638b-1621-451a-80b6-0e13b6ffb734-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.224280 4941 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.229447 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94cbff61-3614-4efd-b4ba-36bef65f2ae7-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "94cbff61-3614-4efd-b4ba-36bef65f2ae7" (UID: "94cbff61-3614-4efd-b4ba-36bef65f2ae7"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.244152 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-config" (OuterVolumeSpecName: "config") pod "674f83a3-0419-43d7-a679-fed1bf09b047" (UID: "674f83a3-0419-43d7-a679-fed1bf09b047"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.247391 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5679f4ed-6882-4f85-93b2-02ccff357b48-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "5679f4ed-6882-4f85-93b2-02ccff357b48" (UID: "5679f4ed-6882-4f85-93b2-02ccff357b48"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.265528 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "674f83a3-0419-43d7-a679-fed1bf09b047" (UID: "674f83a3-0419-43d7-a679-fed1bf09b047"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.267243 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "ea6e32f6-a5d9-4b23-9588-2ea6be572e72" (UID: "ea6e32f6-a5d9-4b23-9588-2ea6be572e72"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: E1130 07:09:01.326628 4941 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.326646 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: E1130 07:09:01.326723 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data podName:4e7a5ee5-1f0c-4819-a375-891a5e2cea03 nodeName:}" failed. No retries permitted until 2025-11-30 07:09:05.326697321 +0000 UTC m=+1366.094869020 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data") pod "rabbitmq-server-0" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03") : configmap "rabbitmq-config-data" not found Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.326754 4941 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/5679f4ed-6882-4f85-93b2-02ccff357b48-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.326768 4941 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/94cbff61-3614-4efd-b4ba-36bef65f2ae7-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.326780 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea6e32f6-a5d9-4b23-9588-2ea6be572e72-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.326792 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/674f83a3-0419-43d7-a679-fed1bf09b047-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.544873 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5679f4ed-6882-4f85-93b2-02ccff357b48" path="/var/lib/kubelet/pods/5679f4ed-6882-4f85-93b2-02ccff357b48/volumes" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.563827 4941 generic.go:334] "Generic (PLEG): container finished" podID="fa90de41-9166-475c-925a-3d79b02a694d" containerID="5c6f3a2a19c5ce5cd94327bc758bf13a86b7ae51c23f1c360ae9657455e0503b" exitCode=0 Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.564122 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fa90de41-9166-475c-925a-3d79b02a694d","Type":"ContainerDied","Data":"5c6f3a2a19c5ce5cd94327bc758bf13a86b7ae51c23f1c360ae9657455e0503b"} Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.564244 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"fa90de41-9166-475c-925a-3d79b02a694d","Type":"ContainerDied","Data":"c69da793530b1efdff4181dc1a1be7c3f55323899c0dec0d9d589dec59dd1e79"} Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.564265 4941 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="c69da793530b1efdff4181dc1a1be7c3f55323899c0dec0d9d589dec59dd1e79" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.568703 4941 scope.go:117] "RemoveContainer" containerID="7f998986b8950e234b7bc2a24fd3e535f40df67a3670f9a085fdfa035b9598de" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.570297 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement6f8e-account-delete-sd7xt" event={"ID":"4f0e9278-1a7f-400f-8a07-31f88c84814b","Type":"ContainerStarted","Data":"f8797662183715011d342c437f17c4bda401042a4d898590a761684b56bf35a1"} Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.577137 4941 generic.go:334] "Generic (PLEG): container finished" podID="c4c68094-a8b8-4327-9ae1-335226d3b938" containerID="79080395206e181e7e0cedceeca71a93fc5f49de3ec9b01c6ffadf8a5877a042" exitCode=0 Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.577180 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder43da-account-delete-8nqxg" event={"ID":"c4c68094-a8b8-4327-9ae1-335226d3b938","Type":"ContainerDied","Data":"79080395206e181e7e0cedceeca71a93fc5f49de3ec9b01c6ffadf8a5877a042"} Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.583879 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a","Type":"ContainerDied","Data":"ce0bfd1f85bac61182bbbc96627d218d0ad3739821fdb809fb08f1b7ad819d33"} Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.583927 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce0bfd1f85bac61182bbbc96627d218d0ad3739821fdb809fb08f1b7ad819d33" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.586032 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancec0c5-account-delete-fx6st" event={"ID":"d229a913-5522-4197-be77-fad9a0187f74","Type":"ContainerStarted","Data":"b3e08258dc90511575b3ea5d9cdf2a7fdbf3b7c07341c97a47280c9caa8b90c7"} Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.587495 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi2ee8-account-delete-jr5gz" event={"ID":"4c62ddf4-ab03-4aa9-968b-ed0a8898d367","Type":"ContainerStarted","Data":"8867cf0276a5ab29363c349c6244c2469dee9f6f9d24e1feff654f37861bb6a7"} Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.589600 4941 generic.go:334] "Generic (PLEG): container finished" podID="1bb6c559-5c94-43b0-b6f0-3992652c720f" containerID="3fbce1fd8d0f846aa7dbe3303e0c31d6b5075eef49ab34fff3344d334300b079" exitCode=0 Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.589685 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican75ae-account-delete-7krwx" event={"ID":"1bb6c559-5c94-43b0-b6f0-3992652c720f","Type":"ContainerDied","Data":"3fbce1fd8d0f846aa7dbe3303e0c31d6b5075eef49ab34fff3344d334300b079"} Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.599096 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f311bacd-2cef-44fe-95c4-38a7462cd4a6","Type":"ContainerDied","Data":"bfe1241d7ec6909462dd3e55c931e8058b61f0aba5c9ecbb83a16810195dbf61"} Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.599485 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bfe1241d7ec6909462dd3e55c931e8058b61f0aba5c9ecbb83a16810195dbf61" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.601500 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.602469 4941 generic.go:334] "Generic (PLEG): container finished" podID="7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce" containerID="54724f9e7b9cb397a6dbf2bf4b7b305271ebdeef511fa6d99df016ffce4d170d" exitCode=0 Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.602574 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce","Type":"ContainerDied","Data":"54724f9e7b9cb397a6dbf2bf4b7b305271ebdeef511fa6d99df016ffce4d170d"} Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.602600 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce","Type":"ContainerDied","Data":"58b00ea84b7c014a2146414a3379487e3a4465eba343c42a24a4d5608f5cef53"} Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.602612 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58b00ea84b7c014a2146414a3379487e3a4465eba343c42a24a4d5608f5cef53" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.606824 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell00ef1-account-delete-jbdnp" event={"ID":"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7","Type":"ContainerStarted","Data":"223c9af50ec252479a9284daf0c400d42370d25dba2a0f8ddfddeb6d1309e07f"} Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.637777 4941 scope.go:117] "RemoveContainer" containerID="7f998986b8950e234b7bc2a24fd3e535f40df67a3670f9a085fdfa035b9598de" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.642911 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7dc97f7589-7ghf8" event={"ID":"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b","Type":"ContainerDied","Data":"14ec24140a57aaa464ef55ac41b92f4dade8d46043b44e8eacba548db02e51ba"} Nov 30 07:09:01 crc kubenswrapper[4941]: E1130 07:09:01.642983 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f998986b8950e234b7bc2a24fd3e535f40df67a3670f9a085fdfa035b9598de\": container with ID starting with 7f998986b8950e234b7bc2a24fd3e535f40df67a3670f9a085fdfa035b9598de not found: ID does not exist" containerID="7f998986b8950e234b7bc2a24fd3e535f40df67a3670f9a085fdfa035b9598de" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.643014 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f998986b8950e234b7bc2a24fd3e535f40df67a3670f9a085fdfa035b9598de"} err="failed to get container status \"7f998986b8950e234b7bc2a24fd3e535f40df67a3670f9a085fdfa035b9598de\": rpc error: code = NotFound desc = could not find container \"7f998986b8950e234b7bc2a24fd3e535f40df67a3670f9a085fdfa035b9598de\": container with ID starting with 7f998986b8950e234b7bc2a24fd3e535f40df67a3670f9a085fdfa035b9598de not found: ID does not exist" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.643033 4941 scope.go:117] "RemoveContainer" containerID="c0d359c73119f6c2eb67ed8df045fe124ba61df2f455244a61a488956e3a2e34" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.643151 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-7dc97f7589-7ghf8" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.735275 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-run-httpd\") pod \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.735352 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-internal-tls-certs\") pod \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.735399 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-public-tls-certs\") pod \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.735506 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-combined-ca-bundle\") pod \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.735551 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-etc-swift\") pod \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.735572 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-config-data\") pod \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.735592 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmvxt\" (UniqueName: \"kubernetes.io/projected/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-kube-api-access-rmvxt\") pod \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.735618 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-log-httpd\") pod \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\" (UID: \"08d286da-d376-4b8a-8a7f-c1d22b5a7c3b\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.736855 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" (UID: "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.737374 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" (UID: "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.745841 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-kube-api-access-rmvxt" (OuterVolumeSpecName: "kube-api-access-rmvxt") pod "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" (UID: "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b"). InnerVolumeSpecName "kube-api-access-rmvxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.747886 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.748482 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" (UID: "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.843480 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" (UID: "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.843864 4941 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.843887 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmvxt\" (UniqueName: \"kubernetes.io/projected/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-kube-api-access-rmvxt\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.843899 4941 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.843907 4941 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.843916 4941 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.856562 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" (UID: "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.892406 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.892835 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="ceilometer-central-agent" containerID="cri-o://258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d" gracePeriod=30 Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.893281 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="proxy-httpd" containerID="cri-o://c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625" gracePeriod=30 Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.893419 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="sg-core" containerID="cri-o://89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4" gracePeriod=30 Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.893506 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="ceilometer-notification-agent" containerID="cri-o://b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b" gracePeriod=30 Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.914690 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 30 07:09:01 crc 
kubenswrapper[4941]: I1130 07:09:01.915167 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="a65953b8-4285-412b-9670-7747951a62ae" containerName="kube-state-metrics" containerID="cri-o://8e135084f5429afb87f87c92443142f7bdd20c8c7f175058d25d225c984bf164" gracePeriod=30 Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.926668 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-config-data" (OuterVolumeSpecName: "config-data") pod "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" (UID: "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.946589 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr287\" (UniqueName: \"kubernetes.io/projected/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-kube-api-access-kr287\") pod \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.946734 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-etc-machine-id\") pod \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.946843 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-combined-ca-bundle\") pod \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.946883 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-config-data\") pod \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.946927 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-scripts\") pod \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.947034 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-config-data-custom\") pod \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\" (UID: \"74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a\") " Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.947506 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.947523 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.961503 4941 scope.go:117] "RemoveContainer" 
containerID="1de6943804783b4a852eb14e98f7041622c4c2594236d04ba6c18ed97142b408" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.965319 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" (UID: "74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:09:01 crc kubenswrapper[4941]: I1130 07:09:01.965568 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" (UID: "74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:01.999634 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:01.999828 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="c05c5cf3-bcb4-4307-a601-fbecde4f026b" containerName="memcached" containerID="cri-o://c8dea3d901536f92143b2d0853186bb74f8eb40c02c82dcf71d84565f2e4dbc7" gracePeriod=30 Nov 30 07:09:02 crc kubenswrapper[4941]: E1130 07:09:02.021563 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:02 crc kubenswrapper[4941]: E1130 07:09:02.021714 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:02 crc kubenswrapper[4941]: E1130 07:09:02.036596 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.037519 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-scripts" (OuterVolumeSpecName: "scripts") pod "74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" (UID: "74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: E1130 07:09:02.039264 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:02 crc kubenswrapper[4941]: E1130 07:09:02.039341 4941 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server" Nov 30 07:09:02 crc kubenswrapper[4941]: E1130 07:09:02.066554 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.066641 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-kube-api-access-kr287" (OuterVolumeSpecName: "kube-api-access-kr287") pod "74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" (UID: "74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a"). InnerVolumeSpecName "kube-api-access-kr287". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.067842 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.067863 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr287\" (UniqueName: \"kubernetes.io/projected/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-kube-api-access-kr287\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.067875 4941 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.067884 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: E1130 07:09:02.098081 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:02 crc kubenswrapper[4941]: E1130 07:09:02.098161 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovs-vswitchd" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.116670 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-wj8qz"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.148074 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.171427 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-prvmj"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.172867 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.226995 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-prvmj"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.240449 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" (UID: "08d286da-d376-4b8a-8a7f-c1d22b5a7c3b"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.276907 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-config-data\") pod \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.276943 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-combined-ca-bundle\") pod \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.277028 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-nova-novncproxy-tls-certs\") pod \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.277078 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twwp4\" (UniqueName: \"kubernetes.io/projected/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-kube-api-access-twwp4\") pod \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\" (UID: \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.277128 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-config-data\") pod \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\" (UID: \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.277155 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-vencrypt-tls-certs\") pod \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.277175 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9zgr\" (UniqueName: \"kubernetes.io/projected/f311bacd-2cef-44fe-95c4-38a7462cd4a6-kube-api-access-t9zgr\") pod \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\" (UID: \"f311bacd-2cef-44fe-95c4-38a7462cd4a6\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.277223 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-combined-ca-bundle\") pod \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\" (UID: \"7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.277588 4941 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.318417 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-wj8qz"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.332590 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.336748 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-kube-api-access-twwp4" (OuterVolumeSpecName: "kube-api-access-twwp4") pod "7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce" (UID: "7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce"). InnerVolumeSpecName "kube-api-access-twwp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.344455 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f311bacd-2cef-44fe-95c4-38a7462cd4a6-kube-api-access-t9zgr" (OuterVolumeSpecName: "kube-api-access-t9zgr") pod "f311bacd-2cef-44fe-95c4-38a7462cd4a6" (UID: "f311bacd-2cef-44fe-95c4-38a7462cd4a6"). InnerVolumeSpecName "kube-api-access-t9zgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.351594 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-79cf87bd4d-c9dvr"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.359650 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-79cf87bd4d-c9dvr" podUID="25deaa20-8f61-4317-ad4a-11df9ddff2fe" containerName="keystone-api" containerID="cri-o://b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4" gracePeriod=30 Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.381553 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9zgr\" (UniqueName: \"kubernetes.io/projected/f311bacd-2cef-44fe-95c4-38a7462cd4a6-kube-api-access-t9zgr\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.381582 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twwp4\" (UniqueName: \"kubernetes.io/projected/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-kube-api-access-twwp4\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.400030 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.409492 4941 scope.go:117] "RemoveContainer" containerID="572b5e690bd61ad1cd6041023c0766c57db67af247693faeb9a84f745057643c" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.468757 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d8fc4ccc9-9n2hf"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.490008 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa90de41-9166-475c-925a-3d79b02a694d-config-data-generated\") pod \"fa90de41-9166-475c-925a-3d79b02a694d\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.490095 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-config-data-default\") pod \"fa90de41-9166-475c-925a-3d79b02a694d\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.490152 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fv99r\" (UniqueName: 
\"kubernetes.io/projected/fa90de41-9166-475c-925a-3d79b02a694d-kube-api-access-fv99r\") pod \"fa90de41-9166-475c-925a-3d79b02a694d\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.490169 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa90de41-9166-475c-925a-3d79b02a694d-galera-tls-certs\") pod \"fa90de41-9166-475c-925a-3d79b02a694d\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.490200 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"fa90de41-9166-475c-925a-3d79b02a694d\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.490226 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-kolla-config\") pod \"fa90de41-9166-475c-925a-3d79b02a694d\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.490302 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa90de41-9166-475c-925a-3d79b02a694d-combined-ca-bundle\") pod \"fa90de41-9166-475c-925a-3d79b02a694d\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.490368 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-operator-scripts\") pod \"fa90de41-9166-475c-925a-3d79b02a694d\" (UID: \"fa90de41-9166-475c-925a-3d79b02a694d\") " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.496062 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "fa90de41-9166-475c-925a-3d79b02a694d" (UID: "fa90de41-9166-475c-925a-3d79b02a694d"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.497932 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fa90de41-9166-475c-925a-3d79b02a694d" (UID: "fa90de41-9166-475c-925a-3d79b02a694d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.504271 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "fa90de41-9166-475c-925a-3d79b02a694d" (UID: "fa90de41-9166-475c-925a-3d79b02a694d"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.504656 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa90de41-9166-475c-925a-3d79b02a694d-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "fa90de41-9166-475c-925a-3d79b02a694d" (UID: "fa90de41-9166-475c-925a-3d79b02a694d"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.529803 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa90de41-9166-475c-925a-3d79b02a694d-kube-api-access-fv99r" (OuterVolumeSpecName: "kube-api-access-fv99r") pod "fa90de41-9166-475c-925a-3d79b02a694d" (UID: "fa90de41-9166-475c-925a-3d79b02a694d"). InnerVolumeSpecName "kube-api-access-fv99r". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.556469 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.588812 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-zcbz9" podUID="e01076ff-d267-4931-8788-47eee9ebfd76" containerName="ovn-controller" probeResult="failure" output=< Nov 30 07:09:02 crc kubenswrapper[4941]: ERROR - Failed to get connection status from ovn-controller, ovn-appctl exit status: 0 Nov 30 07:09:02 crc kubenswrapper[4941]: > Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.592822 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.592846 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa90de41-9166-475c-925a-3d79b02a694d-config-data-generated\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.592855 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-config-data-default\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.592866 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fv99r\" (UniqueName: \"kubernetes.io/projected/fa90de41-9166-475c-925a-3d79b02a694d-kube-api-access-fv99r\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.592876 4941 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa90de41-9166-475c-925a-3d79b02a694d-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.596787 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-pk4mk"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.606358 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" (UID: "74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.628291 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-pk4mk"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.642735 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "fa90de41-9166-475c-925a-3d79b02a694d" (UID: "fa90de41-9166-475c-925a-3d79b02a694d"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.656698 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement6f8e-account-delete-sd7xt"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.683035 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": read tcp 10.217.0.2:35294->10.217.0.197:8775: read: connection reset by peer" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.685735 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": read tcp 10.217.0.2:35306->10.217.0.197:8775: read: connection reset by peer" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.697821 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-rwxvw"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.698510 4941 generic.go:334] "Generic (PLEG): container finished" podID="a65953b8-4285-412b-9670-7747951a62ae" containerID="8e135084f5429afb87f87c92443142f7bdd20c8c7f175058d25d225c984bf164" exitCode=2 Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.698626 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a65953b8-4285-412b-9670-7747951a62ae","Type":"ContainerDied","Data":"8e135084f5429afb87f87c92443142f7bdd20c8c7f175058d25d225c984bf164"} Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.705051 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="1822cd0b-b52d-49d7-b787-a1091edfc585" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.171:8776/healthcheck\": read tcp 10.217.0.2:42516->10.217.0.171:8776: read: connection reset by peer" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.741335 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.741376 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.754131 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-config-data" (OuterVolumeSpecName: "config-data") pod "7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce" (UID: "7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.756725 4941 generic.go:334] "Generic (PLEG): container finished" podID="7a758cc8-4546-4982-b2a7-b7824ecfc118" containerID="009377a60fa215ae8250c38bc50dca1c5bfb321e79ea6ee7dd44dd9fbdcf7b42" exitCode=0 Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.756813 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7c9965466b-7rmfq" event={"ID":"7a758cc8-4546-4982-b2a7-b7824ecfc118","Type":"ContainerDied","Data":"009377a60fa215ae8250c38bc50dca1c5bfb321e79ea6ee7dd44dd9fbdcf7b42"} Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.774581 4941 generic.go:334] "Generic (PLEG): container finished" podID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerID="89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4" exitCode=2 Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.774612 4941 generic.go:334] "Generic (PLEG): container finished" podID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerID="258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d" exitCode=0 Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.774765 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.775770 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.775907 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b","Type":"ContainerDied","Data":"89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4"} Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.775942 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b","Type":"ContainerDied","Data":"258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d"} Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.776103 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.776162 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.776888 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-rwxvw"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.833867 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6794dcdcd8-t9v24" podUID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:50918->10.217.0.155:9311: read: connection reset by peer" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.835579 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce" (UID: "7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.839652 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-6f8e-account-create-update-n2v6n"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.839947 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6794dcdcd8-t9v24" podUID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.155:9311/healthcheck\": read tcp 10.217.0.2:50928->10.217.0.155:9311: read: connection reset by peer" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.843273 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.843295 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.848298 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-6f8e-account-create-update-n2v6n"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.855460 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-2hzwv"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.867866 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-2hzwv"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.875031 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="45978317-0f07-44da-8b74-fbaaec0e6105" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused" Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.887419 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-7dc97f7589-7ghf8"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.897268 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-7dc97f7589-7ghf8"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.906285 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-75ae-account-create-update-fl25n"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.928495 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-75ae-account-create-update-fl25n"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.943206 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican75ae-account-delete-7krwx"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.950286 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-nn258"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.960084 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-nn258"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.968843 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-f9c5-account-create-update-r4chg"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.975241 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-f9c5-account-create-update-r4chg"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 
07:09:02.985367 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-xpsqj"] Nov 30 07:09:02 crc kubenswrapper[4941]: I1130 07:09:02.996527 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-xpsqj"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.015909 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-a2cc-account-create-update-qdnx2"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.022985 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutrona2cc-account-delete-262pl"] Nov 30 07:09:03 crc kubenswrapper[4941]: E1130 07:09:03.027447 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 30 07:09:03 crc kubenswrapper[4941]: E1130 07:09:03.030747 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.036698 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-a2cc-account-create-update-qdnx2"] Nov 30 07:09:03 crc kubenswrapper[4941]: E1130 07:09:03.039379 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 30 07:09:03 crc kubenswrapper[4941]: E1130 07:09:03.039415 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="322fb449-5599-45af-97e2-158692366d9b" containerName="ovn-northd" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.046528 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-s2h5c"] Nov 30 07:09:03 crc kubenswrapper[4941]: E1130 07:09:03.055155 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="370789e569c2ad6cc7a3fb517040dd5ea003437e5aef35484b9575972bc086d9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.063384 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-s2h5c"] Nov 30 07:09:03 crc kubenswrapper[4941]: E1130 07:09:03.064839 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="370789e569c2ad6cc7a3fb517040dd5ea003437e5aef35484b9575972bc086d9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 30 07:09:03 crc kubenswrapper[4941]: E1130 07:09:03.066177 4941 log.go:32] "ExecSync cmd from runtime 
service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="370789e569c2ad6cc7a3fb517040dd5ea003437e5aef35484b9575972bc086d9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 30 07:09:03 crc kubenswrapper[4941]: E1130 07:09:03.066229 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="14ac7adf-c7e3-4512-9c65-6361d005b4b7" containerName="nova-scheduler-scheduler" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.077879 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glancec0c5-account-delete-fx6st"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.086046 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-c0c5-account-create-update-d484t"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.092474 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-c0c5-account-create-update-d484t"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.107596 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-j62c8"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.115951 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-j62c8"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.120855 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi2ee8-account-delete-jr5gz"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.126956 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-2ee8-account-create-update-56k92"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.133864 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-2ee8-account-create-update-56k92"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.143596 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-wgg9g"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.150719 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-wgg9g"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.163638 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell00ef1-account-delete-jbdnp"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.172576 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-0ef1-account-create-update-w7lxz"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.219090 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa90de41-9166-475c-925a-3d79b02a694d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa90de41-9166-475c-925a-3d79b02a694d" (UID: "fa90de41-9166-475c-925a-3d79b02a694d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.235697 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "f311bacd-2cef-44fe-95c4-38a7462cd4a6" (UID: "f311bacd-2cef-44fe-95c4-38a7462cd4a6"). InnerVolumeSpecName "vencrypt-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.237670 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f311bacd-2cef-44fe-95c4-38a7462cd4a6" (UID: "f311bacd-2cef-44fe-95c4-38a7462cd4a6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.250961 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa90de41-9166-475c-925a-3d79b02a694d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.251030 4941 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.251044 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.267356 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-0ef1-account-create-update-w7lxz"] Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.274212 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-config-data" (OuterVolumeSpecName: "config-data") pod "f311bacd-2cef-44fe-95c4-38a7462cd4a6" (UID: "f311bacd-2cef-44fe-95c4-38a7462cd4a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.306007 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "f311bacd-2cef-44fe-95c4-38a7462cd4a6" (UID: "f311bacd-2cef-44fe-95c4-38a7462cd4a6"). InnerVolumeSpecName "nova-novncproxy-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.345572 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.359385 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.359422 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.359432 4941 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/f311bacd-2cef-44fe-95c4-38a7462cd4a6-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.387577 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="79ca3b29-7cdd-4923-a12c-2f350d4b8728" containerName="galera" containerID="cri-o://b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da" gracePeriod=30 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.452922 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-config-data" (OuterVolumeSpecName: "config-data") pod "74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" (UID: "74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.455224 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa90de41-9166-475c-925a-3d79b02a694d-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "fa90de41-9166-475c-925a-3d79b02a694d" (UID: "fa90de41-9166-475c-925a-3d79b02a694d"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.463210 4941 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa90de41-9166-475c-925a-3d79b02a694d-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.463887 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.540399 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" path="/var/lib/kubelet/pods/08d286da-d376-4b8a-8a7f-c1d22b5a7c3b/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.541220 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61013b6e-d5e6-4867-b2b9-e88c540ad362" path="/var/lib/kubelet/pods/61013b6e-d5e6-4867-b2b9-e88c540ad362/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.541731 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66dbaf24-8e9d-4981-a37c-561c8c7e98aa" path="/var/lib/kubelet/pods/66dbaf24-8e9d-4981-a37c-561c8c7e98aa/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.542798 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="674f83a3-0419-43d7-a679-fed1bf09b047" path="/var/lib/kubelet/pods/674f83a3-0419-43d7-a679-fed1bf09b047/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.543364 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3" path="/var/lib/kubelet/pods/6b9d9295-79d8-4d6e-98f4-c184a3ebb8d3/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.543838 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="727089d5-851d-4e5e-80ed-f09e770f1fe2" path="/var/lib/kubelet/pods/727089d5-851d-4e5e-80ed-f09e770f1fe2/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.544784 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="831541f1-b1bf-4625-b4db-8a2b57d0481e" path="/var/lib/kubelet/pods/831541f1-b1bf-4625-b4db-8a2b57d0481e/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.545299 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94cbff61-3614-4efd-b4ba-36bef65f2ae7" path="/var/lib/kubelet/pods/94cbff61-3614-4efd-b4ba-36bef65f2ae7/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.545911 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31804e8-764b-460f-bd79-e64ef5b7d06a" path="/var/lib/kubelet/pods/a31804e8-764b-460f-bd79-e64ef5b7d06a/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.546515 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c08bb8f0-daa5-4321-8e42-bf2713833cbf" path="/var/lib/kubelet/pods/c08bb8f0-daa5-4321-8e42-bf2713833cbf/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.547610 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5c83930-89f1-45d1-827f-1584ee8ce557" path="/var/lib/kubelet/pods/c5c83930-89f1-45d1-827f-1584ee8ce557/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.548143 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8af3337-ba7b-41d6-915b-d9eeb7443354" 
path="/var/lib/kubelet/pods/c8af3337-ba7b-41d6-915b-d9eeb7443354/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.548681 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0ce952e-a290-4b54-b720-c34632cef479" path="/var/lib/kubelet/pods/d0ce952e-a290-4b54-b720-c34632cef479/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.549624 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dee8aa58-2a9c-4597-9d06-629fa6e37648" path="/var/lib/kubelet/pods/dee8aa58-2a9c-4597-9d06-629fa6e37648/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.550111 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e76ecf6f-9843-4077-b8ff-602840dac5af" path="/var/lib/kubelet/pods/e76ecf6f-9843-4077-b8ff-602840dac5af/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.550593 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eace6e2e-dba9-4430-9383-0cbcb862d675" path="/var/lib/kubelet/pods/eace6e2e-dba9-4430-9383-0cbcb862d675/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.551057 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed5cfc02-cb0d-4b4c-a239-40f23890dbd3" path="/var/lib/kubelet/pods/ed5cfc02-cb0d-4b4c-a239-40f23890dbd3/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.556521 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3664bd8-d8a8-4d42-b983-ee81649d4db2" path="/var/lib/kubelet/pods/f3664bd8-d8a8-4d42-b983-ee81649d4db2/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.557104 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fed42e7e-ed1d-4463-8088-3d60e06dd00e" path="/var/lib/kubelet/pods/fed42e7e-ed1d-4463-8088-3d60e06dd00e/volumes" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.810561 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancec0c5-account-delete-fx6st" event={"ID":"d229a913-5522-4197-be77-fad9a0187f74","Type":"ContainerStarted","Data":"f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.810704 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glancec0c5-account-delete-fx6st" podUID="d229a913-5522-4197-be77-fad9a0187f74" containerName="mariadb-account-delete" containerID="cri-o://f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d" gracePeriod=30 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.810406 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="4e7a5ee5-1f0c-4819-a375-891a5e2cea03" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.833498 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican75ae-account-delete-7krwx" event={"ID":"1bb6c559-5c94-43b0-b6f0-3992652c720f","Type":"ContainerDied","Data":"8ce79d44fde9c147f0f0daea7b0424a91b6cad70acd08cc8b9d37fddb9bd4ce7"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.833539 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ce79d44fde9c147f0f0daea7b0424a91b6cad70acd08cc8b9d37fddb9bd4ce7" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.847703 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glancec0c5-account-delete-fx6st" 
podStartSLOduration=6.847686274 podStartE2EDuration="6.847686274s" podCreationTimestamp="2025-11-30 07:08:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:09:03.842615469 +0000 UTC m=+1364.610787078" watchObservedRunningTime="2025-11-30 07:09:03.847686274 +0000 UTC m=+1364.615857883" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.854611 4941 generic.go:334] "Generic (PLEG): container finished" podID="14ac7adf-c7e3-4512-9c65-6361d005b4b7" containerID="370789e569c2ad6cc7a3fb517040dd5ea003437e5aef35484b9575972bc086d9" exitCode=0 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.854737 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"14ac7adf-c7e3-4512-9c65-6361d005b4b7","Type":"ContainerDied","Data":"370789e569c2ad6cc7a3fb517040dd5ea003437e5aef35484b9575972bc086d9"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.860291 4941 generic.go:334] "Generic (PLEG): container finished" podID="c05c5cf3-bcb4-4307-a601-fbecde4f026b" containerID="c8dea3d901536f92143b2d0853186bb74f8eb40c02c82dcf71d84565f2e4dbc7" exitCode=0 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.860401 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c05c5cf3-bcb4-4307-a601-fbecde4f026b","Type":"ContainerDied","Data":"c8dea3d901536f92143b2d0853186bb74f8eb40c02c82dcf71d84565f2e4dbc7"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.860429 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c05c5cf3-bcb4-4307-a601-fbecde4f026b","Type":"ContainerDied","Data":"7cd3a8f355128932455394c1f51e66da3f431efc5a1a14840d4cc183dcb61623"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.860440 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7cd3a8f355128932455394c1f51e66da3f431efc5a1a14840d4cc183dcb61623" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.876204 4941 generic.go:334] "Generic (PLEG): container finished" podID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerID="c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625" exitCode=0 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.876271 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b","Type":"ContainerDied","Data":"c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.877538 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi2ee8-account-delete-jr5gz" event={"ID":"4c62ddf4-ab03-4aa9-968b-ed0a8898d367","Type":"ContainerStarted","Data":"a8397a6565b65f4dcbf9cc9eeeb676b2bc40c04a5d9bbc034d755ede612546af"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.877956 4941 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/novaapi2ee8-account-delete-jr5gz" secret="" err="secret \"galera-openstack-dockercfg-48dx7\" not found" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.880681 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a65953b8-4285-412b-9670-7747951a62ae","Type":"ContainerDied","Data":"521abc31c0f8b06fae40f6723b74a8072511b6b173dfcfbfc7b2a2f7ed5c112b"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.880719 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="521abc31c0f8b06fae40f6723b74a8072511b6b173dfcfbfc7b2a2f7ed5c112b" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.887605 4941 generic.go:334] "Generic (PLEG): container finished" podID="a573f7e0-ee6d-4847-a778-5f6ef41fd17f" containerID="6a679ded02687521bbab8ba0d141ec44027e549ed85d08f01b4106e0cd6db8b8" exitCode=0 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.887684 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a573f7e0-ee6d-4847-a778-5f6ef41fd17f","Type":"ContainerDied","Data":"6a679ded02687521bbab8ba0d141ec44027e549ed85d08f01b4106e0cd6db8b8"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.896896 4941 generic.go:334] "Generic (PLEG): container finished" podID="3315d9fd-71da-4f22-98d8-7142da896aab" containerID="fd044d0caf33128dc424d871cee00e10115231eb3535e2249f4d39b8c3c04af1" exitCode=0 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.896967 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3315d9fd-71da-4f22-98d8-7142da896aab","Type":"ContainerDied","Data":"fd044d0caf33128dc424d871cee00e10115231eb3535e2249f4d39b8c3c04af1"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.896996 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3315d9fd-71da-4f22-98d8-7142da896aab","Type":"ContainerDied","Data":"4d3514728f85901cd14b4e4daaaf405eb1384a94683b10d90ca7c50764b4cfbf"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.897008 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d3514728f85901cd14b4e4daaaf405eb1384a94683b10d90ca7c50764b4cfbf" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.908528 4941 generic.go:334] "Generic (PLEG): container finished" podID="a376fae9-3d2f-4247-b917-0d63e6f4a9da" containerID="dfb235dd19464a6b133288b3327f7e345c63de23737ba8b6a478ad9216727e2b" exitCode=0 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.908655 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a376fae9-3d2f-4247-b917-0d63e6f4a9da","Type":"ContainerDied","Data":"dfb235dd19464a6b133288b3327f7e345c63de23737ba8b6a478ad9216727e2b"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.911615 4941 generic.go:334] "Generic (PLEG): container finished" podID="1822cd0b-b52d-49d7-b787-a1091edfc585" containerID="96e02e36152e3e77bc3bcb37965f0c42f78aaeec0dbbaa72be4ad8497a927704" exitCode=0 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.911679 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1822cd0b-b52d-49d7-b787-a1091edfc585","Type":"ContainerDied","Data":"96e02e36152e3e77bc3bcb37965f0c42f78aaeec0dbbaa72be4ad8497a927704"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.911711 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"1822cd0b-b52d-49d7-b787-a1091edfc585","Type":"ContainerDied","Data":"ebc4760d02ea26948818c3505bbf1ced4d372e9a77ff8f1caf03fce3b5bca38a"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.911722 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebc4760d02ea26948818c3505bbf1ced4d372e9a77ff8f1caf03fce3b5bca38a" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.913335 4941 generic.go:334] "Generic (PLEG): container finished" podID="88971618-54e2-4670-be08-a6ae63ed99df" containerID="ef324eb6f3967c89fcde7a1338bf072fab857f22a3e354ed3b1cd701d98d5c93" exitCode=0 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.913374 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"88971618-54e2-4670-be08-a6ae63ed99df","Type":"ContainerDied","Data":"ef324eb6f3967c89fcde7a1338bf072fab857f22a3e354ed3b1cd701d98d5c93"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.914408 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder43da-account-delete-8nqxg" event={"ID":"c4c68094-a8b8-4327-9ae1-335226d3b938","Type":"ContainerDied","Data":"f2509a49bab6ccbd75baa4546123fd004bc0541d9ef823d46c73276c9fc37974"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.914439 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2509a49bab6ccbd75baa4546123fd004bc0541d9ef823d46c73276c9fc37974" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.916886 4941 generic.go:334] "Generic (PLEG): container finished" podID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" containerID="b77426d5a27eb3609eea859e72da0ab57c475fc3ec836c9ba0a47eb046b015a1" exitCode=0 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.917025 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc","Type":"ContainerDied","Data":"b77426d5a27eb3609eea859e72da0ab57c475fc3ec836c9ba0a47eb046b015a1"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.917043 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc","Type":"ContainerDied","Data":"2ab8ea4ffd71c0b3f30bcce75240082e341bcc1790224dff20c8c1d510c7b505"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.917053 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ab8ea4ffd71c0b3f30bcce75240082e341bcc1790224dff20c8c1d510c7b505" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.918526 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7c9965466b-7rmfq" event={"ID":"7a758cc8-4546-4982-b2a7-b7824ecfc118","Type":"ContainerDied","Data":"066567c51ffcaa136d125b9b00af881dc67f8f5a4a55ced105e493837ac88c12"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.918549 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="066567c51ffcaa136d125b9b00af881dc67f8f5a4a55ced105e493837ac88c12" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.924473 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement6f8e-account-delete-sd7xt" podUID="4f0e9278-1a7f-400f-8a07-31f88c84814b" containerName="mariadb-account-delete" containerID="cri-o://2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85" gracePeriod=30 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.925731 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement6f8e-account-delete-sd7xt" event={"ID":"4f0e9278-1a7f-400f-8a07-31f88c84814b","Type":"ContainerStarted","Data":"2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.929991 4941 generic.go:334] "Generic (PLEG): container finished" podID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" containerID="26ada5d7acc3a461a8a330fae6ff00d6ad1801a8caa3a433600d9307e3c1a50d" exitCode=0 Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.930033 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6794dcdcd8-t9v24" event={"ID":"cf0e4aae-888b-4df8-a6e2-19a5f04b9656","Type":"ContainerDied","Data":"26ada5d7acc3a461a8a330fae6ff00d6ad1801a8caa3a433600d9307e3c1a50d"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.930056 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6794dcdcd8-t9v24" event={"ID":"cf0e4aae-888b-4df8-a6e2-19a5f04b9656","Type":"ContainerDied","Data":"900e56bf351145d2e33c5f723dada72f701a715ec5a9918b6ee37c0e3f41c9a5"} Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.930067 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="900e56bf351145d2e33c5f723dada72f701a715ec5a9918b6ee37c0e3f41c9a5" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.930697 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novaapi2ee8-account-delete-jr5gz" podStartSLOduration=6.930688636 podStartE2EDuration="6.930688636s" podCreationTimestamp="2025-11-30 07:08:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:09:03.898082298 +0000 UTC m=+1364.666253907" watchObservedRunningTime="2025-11-30 07:09:03.930688636 +0000 UTC m=+1364.698860245" Nov 30 07:09:03 crc kubenswrapper[4941]: I1130 07:09:03.956934 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement6f8e-account-delete-sd7xt" podStartSLOduration=6.95691327 podStartE2EDuration="6.95691327s" podCreationTimestamp="2025-11-30 07:08:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:09:03.939047692 +0000 UTC m=+1364.707219301" watchObservedRunningTime="2025-11-30 07:09:03.95691327 +0000 UTC m=+1364.725084879" Nov 30 07:09:03 crc kubenswrapper[4941]: E1130 07:09:03.975201 4941 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 30 07:09:03 crc kubenswrapper[4941]: E1130 07:09:03.975257 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts podName:4c62ddf4-ab03-4aa9-968b-ed0a8898d367 nodeName:}" failed. No retries permitted until 2025-11-30 07:09:04.475243681 +0000 UTC m=+1365.243415290 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts") pod "novaapi2ee8-account-delete-jr5gz" (UID: "4c62ddf4-ab03-4aa9-968b-ed0a8898d367") : configmap "openstack-scripts" not found Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.046211 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.068373 4941 scope.go:117] "RemoveContainer" containerID="d012ac9e18256da2434ecdcf48894a12178c369b643c9b30f25771ee1c4fba92" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.070507 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.076970 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-combined-ca-bundle\") pod \"7a758cc8-4546-4982-b2a7-b7824ecfc118\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.077130 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-public-tls-certs\") pod \"7a758cc8-4546-4982-b2a7-b7824ecfc118\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.077203 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-config-data\") pod \"7a758cc8-4546-4982-b2a7-b7824ecfc118\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.077278 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a758cc8-4546-4982-b2a7-b7824ecfc118-logs\") pod \"7a758cc8-4546-4982-b2a7-b7824ecfc118\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.077314 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6r4b8\" (UniqueName: \"kubernetes.io/projected/7a758cc8-4546-4982-b2a7-b7824ecfc118-kube-api-access-6r4b8\") pod \"7a758cc8-4546-4982-b2a7-b7824ecfc118\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.077362 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-internal-tls-certs\") pod \"7a758cc8-4546-4982-b2a7-b7824ecfc118\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.077416 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-scripts\") pod \"7a758cc8-4546-4982-b2a7-b7824ecfc118\" (UID: \"7a758cc8-4546-4982-b2a7-b7824ecfc118\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.079847 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a758cc8-4546-4982-b2a7-b7824ecfc118-logs" (OuterVolumeSpecName: "logs") pod "7a758cc8-4546-4982-b2a7-b7824ecfc118" (UID: "7a758cc8-4546-4982-b2a7-b7824ecfc118"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.126428 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.130470 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-scripts" (OuterVolumeSpecName: "scripts") pod "7a758cc8-4546-4982-b2a7-b7824ecfc118" (UID: "7a758cc8-4546-4982-b2a7-b7824ecfc118"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.133548 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a758cc8-4546-4982-b2a7-b7824ecfc118-kube-api-access-6r4b8" (OuterVolumeSpecName: "kube-api-access-6r4b8") pod "7a758cc8-4546-4982-b2a7-b7824ecfc118" (UID: "7a758cc8-4546-4982-b2a7-b7824ecfc118"). InnerVolumeSpecName "kube-api-access-6r4b8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.138025 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican75ae-account-delete-7krwx" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.138908 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.179382 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lx8gl\" (UniqueName: \"kubernetes.io/projected/c05c5cf3-bcb4-4307-a601-fbecde4f026b-kube-api-access-lx8gl\") pod \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.179487 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05c5cf3-bcb4-4307-a601-fbecde4f026b-combined-ca-bundle\") pod \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.179583 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-kube-state-metrics-tls-certs\") pod \"a65953b8-4285-412b-9670-7747951a62ae\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.179648 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-combined-ca-bundle\") pod \"a65953b8-4285-412b-9670-7747951a62ae\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.179670 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bb6c559-5c94-43b0-b6f0-3992652c720f-operator-scripts\") pod \"1bb6c559-5c94-43b0-b6f0-3992652c720f\" (UID: \"1bb6c559-5c94-43b0-b6f0-3992652c720f\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.179700 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c05c5cf3-bcb4-4307-a601-fbecde4f026b-kolla-config\") pod \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.179722 4941 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-kube-state-metrics-tls-config\") pod \"a65953b8-4285-412b-9670-7747951a62ae\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.179792 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lt72\" (UniqueName: \"kubernetes.io/projected/a65953b8-4285-412b-9670-7747951a62ae-kube-api-access-2lt72\") pod \"a65953b8-4285-412b-9670-7747951a62ae\" (UID: \"a65953b8-4285-412b-9670-7747951a62ae\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.179833 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c05c5cf3-bcb4-4307-a601-fbecde4f026b-memcached-tls-certs\") pod \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.179926 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c05c5cf3-bcb4-4307-a601-fbecde4f026b-config-data\") pod \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\" (UID: \"c05c5cf3-bcb4-4307-a601-fbecde4f026b\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.179955 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rp9m8\" (UniqueName: \"kubernetes.io/projected/1bb6c559-5c94-43b0-b6f0-3992652c720f-kube-api-access-rp9m8\") pod \"1bb6c559-5c94-43b0-b6f0-3992652c720f\" (UID: \"1bb6c559-5c94-43b0-b6f0-3992652c720f\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.180469 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7a758cc8-4546-4982-b2a7-b7824ecfc118-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.180493 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6r4b8\" (UniqueName: \"kubernetes.io/projected/7a758cc8-4546-4982-b2a7-b7824ecfc118-kube-api-access-6r4b8\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.180505 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.182273 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bb6c559-5c94-43b0-b6f0-3992652c720f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1bb6c559-5c94-43b0-b6f0-3992652c720f" (UID: "1bb6c559-5c94-43b0-b6f0-3992652c720f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.184970 4941 scope.go:117] "RemoveContainer" containerID="07f994f9e1c2fa25bd24a1b436f837eb006877e11013a589a34bf028d013c942" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.188665 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c05c5cf3-bcb4-4307-a601-fbecde4f026b-kube-api-access-lx8gl" (OuterVolumeSpecName: "kube-api-access-lx8gl") pod "c05c5cf3-bcb4-4307-a601-fbecde4f026b" (UID: "c05c5cf3-bcb4-4307-a601-fbecde4f026b"). InnerVolumeSpecName "kube-api-access-lx8gl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.191961 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c05c5cf3-bcb4-4307-a601-fbecde4f026b-config-data" (OuterVolumeSpecName: "config-data") pod "c05c5cf3-bcb4-4307-a601-fbecde4f026b" (UID: "c05c5cf3-bcb4-4307-a601-fbecde4f026b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.192235 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a65953b8-4285-412b-9670-7747951a62ae-kube-api-access-2lt72" (OuterVolumeSpecName: "kube-api-access-2lt72") pod "a65953b8-4285-412b-9670-7747951a62ae" (UID: "a65953b8-4285-412b-9670-7747951a62ae"). InnerVolumeSpecName "kube-api-access-2lt72". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.202409 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c05c5cf3-bcb4-4307-a601-fbecde4f026b-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "c05c5cf3-bcb4-4307-a601-fbecde4f026b" (UID: "c05c5cf3-bcb4-4307-a601-fbecde4f026b"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.211699 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.213250 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder43da-account-delete-8nqxg" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.237268 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.244448 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.244624 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.260076 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c05c5cf3-bcb4-4307-a601-fbecde4f026b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c05c5cf3-bcb4-4307-a601-fbecde4f026b" (UID: "c05c5cf3-bcb4-4307-a601-fbecde4f026b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.268202 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6794dcdcd8-t9v24" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.270799 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bb6c559-5c94-43b0-b6f0-3992652c720f-kube-api-access-rp9m8" (OuterVolumeSpecName: "kube-api-access-rp9m8") pod "1bb6c559-5c94-43b0-b6f0-3992652c720f" (UID: "1bb6c559-5c94-43b0-b6f0-3992652c720f"). InnerVolumeSpecName "kube-api-access-rp9m8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.279135 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.285437 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-combined-ca-bundle\") pod \"3315d9fd-71da-4f22-98d8-7142da896aab\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.285474 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-config-data\") pod \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.285533 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-config-data-custom\") pod \"1822cd0b-b52d-49d7-b787-a1091edfc585\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.285552 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szdql\" (UniqueName: \"kubernetes.io/projected/3315d9fd-71da-4f22-98d8-7142da896aab-kube-api-access-szdql\") pod \"3315d9fd-71da-4f22-98d8-7142da896aab\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.285579 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1822cd0b-b52d-49d7-b787-a1091edfc585-etc-machine-id\") pod \"1822cd0b-b52d-49d7-b787-a1091edfc585\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.285883 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1822cd0b-b52d-49d7-b787-a1091edfc585-logs\") pod \"1822cd0b-b52d-49d7-b787-a1091edfc585\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.285905 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-combined-ca-bundle\") pod \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.285957 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzrjx\" (UniqueName: \"kubernetes.io/projected/1822cd0b-b52d-49d7-b787-a1091edfc585-kube-api-access-wzrjx\") pod \"1822cd0b-b52d-49d7-b787-a1091edfc585\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.285993 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-logs\") pod \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.286013 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7jbx\" (UniqueName: \"kubernetes.io/projected/c4c68094-a8b8-4327-9ae1-335226d3b938-kube-api-access-f7jbx\") pod 
\"c4c68094-a8b8-4327-9ae1-335226d3b938\" (UID: \"c4c68094-a8b8-4327-9ae1-335226d3b938\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.286045 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-logs\") pod \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.286106 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-nova-metadata-tls-certs\") pod \"3315d9fd-71da-4f22-98d8-7142da896aab\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.286122 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wltsh\" (UniqueName: \"kubernetes.io/projected/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-kube-api-access-wltsh\") pod \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.286153 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-internal-tls-certs\") pod \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.286168 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-combined-ca-bundle\") pod \"1822cd0b-b52d-49d7-b787-a1091edfc585\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.286195 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4c68094-a8b8-4327-9ae1-335226d3b938-operator-scripts\") pod \"c4c68094-a8b8-4327-9ae1-335226d3b938\" (UID: \"c4c68094-a8b8-4327-9ae1-335226d3b938\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.289544 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.289750 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-combined-ca-bundle\") pod \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.289777 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-config-data-custom\") pod \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\" (UID: \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.289832 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-config-data\") pod \"3315d9fd-71da-4f22-98d8-7142da896aab\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.289848 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3315d9fd-71da-4f22-98d8-7142da896aab-logs\") pod \"3315d9fd-71da-4f22-98d8-7142da896aab\" (UID: \"3315d9fd-71da-4f22-98d8-7142da896aab\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.289912 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-public-tls-certs\") pod \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.290635 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-config-data\") pod \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.290673 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-public-tls-certs\") pod \"1822cd0b-b52d-49d7-b787-a1091edfc585\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.290722 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-scripts\") pod \"1822cd0b-b52d-49d7-b787-a1091edfc585\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.290742 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6k5mm\" (UniqueName: \"kubernetes.io/projected/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-kube-api-access-6k5mm\") pod \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.290762 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-public-tls-certs\") pod \"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\" (UID: 
\"cf0e4aae-888b-4df8-a6e2-19a5f04b9656\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.290884 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-config-data\") pod \"1822cd0b-b52d-49d7-b787-a1091edfc585\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.290923 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-internal-tls-certs\") pod \"1822cd0b-b52d-49d7-b787-a1091edfc585\" (UID: \"1822cd0b-b52d-49d7-b787-a1091edfc585\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.290946 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-internal-tls-certs\") pod \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\" (UID: \"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.291474 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1822cd0b-b52d-49d7-b787-a1091edfc585-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1822cd0b-b52d-49d7-b787-a1091edfc585" (UID: "1822cd0b-b52d-49d7-b787-a1091edfc585"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.291883 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3315d9fd-71da-4f22-98d8-7142da896aab-logs" (OuterVolumeSpecName: "logs") pod "3315d9fd-71da-4f22-98d8-7142da896aab" (UID: "3315d9fd-71da-4f22-98d8-7142da896aab"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.292022 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-logs" (OuterVolumeSpecName: "logs") pod "cf0e4aae-888b-4df8-a6e2-19a5f04b9656" (UID: "cf0e4aae-888b-4df8-a6e2-19a5f04b9656"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.293852 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-logs" (OuterVolumeSpecName: "logs") pod "4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" (UID: "4571a7ec-45e1-4c4b-a96a-b9841b3d89bc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.295924 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-kube-api-access-wltsh" (OuterVolumeSpecName: "kube-api-access-wltsh") pod "cf0e4aae-888b-4df8-a6e2-19a5f04b9656" (UID: "cf0e4aae-888b-4df8-a6e2-19a5f04b9656"). InnerVolumeSpecName "kube-api-access-wltsh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.296546 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1822cd0b-b52d-49d7-b787-a1091edfc585-logs" (OuterVolumeSpecName: "logs") pod "1822cd0b-b52d-49d7-b787-a1091edfc585" (UID: "1822cd0b-b52d-49d7-b787-a1091edfc585"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.296682 4941 scope.go:117] "RemoveContainer" containerID="f817ede79334101b289bba0c90e92e4faabbe95f8192168498ec3ad0a4f103ef" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.298039 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-scripts" (OuterVolumeSpecName: "scripts") pod "1822cd0b-b52d-49d7-b787-a1091edfc585" (UID: "1822cd0b-b52d-49d7-b787-a1091edfc585"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.300276 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "cf0e4aae-888b-4df8-a6e2-19a5f04b9656" (UID: "cf0e4aae-888b-4df8-a6e2-19a5f04b9656"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.303418 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4c68094-a8b8-4327-9ae1-335226d3b938-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c4c68094-a8b8-4327-9ae1-335226d3b938" (UID: "c4c68094-a8b8-4327-9ae1-335226d3b938"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.306778 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.310231 4941 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1822cd0b-b52d-49d7-b787-a1091edfc585-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.310256 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05c5cf3-bcb4-4307-a601-fbecde4f026b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.310269 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bb6c559-5c94-43b0-b6f0-3992652c720f-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.310278 4941 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c05c5cf3-bcb4-4307-a601-fbecde4f026b-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.310290 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lt72\" (UniqueName: \"kubernetes.io/projected/a65953b8-4285-412b-9670-7747951a62ae-kube-api-access-2lt72\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.310302 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c05c5cf3-bcb4-4307-a601-fbecde4f026b-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.310311 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rp9m8\" (UniqueName: \"kubernetes.io/projected/1bb6c559-5c94-43b0-b6f0-3992652c720f-kube-api-access-rp9m8\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.310320 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lx8gl\" (UniqueName: \"kubernetes.io/projected/c05c5cf3-bcb4-4307-a601-fbecde4f026b-kube-api-access-lx8gl\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.312835 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1822cd0b-b52d-49d7-b787-a1091edfc585-kube-api-access-wzrjx" (OuterVolumeSpecName: "kube-api-access-wzrjx") pod "1822cd0b-b52d-49d7-b787-a1091edfc585" (UID: "1822cd0b-b52d-49d7-b787-a1091edfc585"). InnerVolumeSpecName "kube-api-access-wzrjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.316113 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3315d9fd-71da-4f22-98d8-7142da896aab-kube-api-access-szdql" (OuterVolumeSpecName: "kube-api-access-szdql") pod "3315d9fd-71da-4f22-98d8-7142da896aab" (UID: "3315d9fd-71da-4f22-98d8-7142da896aab"). InnerVolumeSpecName "kube-api-access-szdql". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.322662 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.330694 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-kube-api-access-6k5mm" (OuterVolumeSpecName: "kube-api-access-6k5mm") pod "4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" (UID: "4571a7ec-45e1-4c4b-a96a-b9841b3d89bc"). InnerVolumeSpecName "kube-api-access-6k5mm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.331124 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4c68094-a8b8-4327-9ae1-335226d3b938-kube-api-access-f7jbx" (OuterVolumeSpecName: "kube-api-access-f7jbx") pod "c4c68094-a8b8-4327-9ae1-335226d3b938" (UID: "c4c68094-a8b8-4327-9ae1-335226d3b938"). InnerVolumeSpecName "kube-api-access-f7jbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.333464 4941 scope.go:117] "RemoveContainer" containerID="1b70a2a767c9e03c6d699f586af8bd64fc30e67891d157e6de0aa7801367dc48" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.352676 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1822cd0b-b52d-49d7-b787-a1091edfc585" (UID: "1822cd0b-b52d-49d7-b787-a1091edfc585"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.352749 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.364632 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.370073 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411424 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmxtj\" (UniqueName: \"kubernetes.io/projected/a376fae9-3d2f-4247-b917-0d63e6f4a9da-kube-api-access-lmxtj\") pod \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411471 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ac7adf-c7e3-4512-9c65-6361d005b4b7-config-data\") pod \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\" (UID: \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411493 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411529 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-scripts\") pod 
\"a376fae9-3d2f-4247-b917-0d63e6f4a9da\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411617 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f26k6\" (UniqueName: \"kubernetes.io/projected/88971618-54e2-4670-be08-a6ae63ed99df-kube-api-access-f26k6\") pod \"88971618-54e2-4670-be08-a6ae63ed99df\" (UID: \"88971618-54e2-4670-be08-a6ae63ed99df\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411756 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ac7adf-c7e3-4512-9c65-6361d005b4b7-combined-ca-bundle\") pod \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\" (UID: \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411791 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a376fae9-3d2f-4247-b917-0d63e6f4a9da-httpd-run\") pod \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411822 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-config-data\") pod \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411836 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88971618-54e2-4670-be08-a6ae63ed99df-config-data\") pod \"88971618-54e2-4670-be08-a6ae63ed99df\" (UID: \"88971618-54e2-4670-be08-a6ae63ed99df\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411859 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88971618-54e2-4670-be08-a6ae63ed99df-combined-ca-bundle\") pod \"88971618-54e2-4670-be08-a6ae63ed99df\" (UID: \"88971618-54e2-4670-be08-a6ae63ed99df\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411885 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-public-tls-certs\") pod \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411902 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a376fae9-3d2f-4247-b917-0d63e6f4a9da-logs\") pod \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411920 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-combined-ca-bundle\") pod \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\" (UID: \"a376fae9-3d2f-4247-b917-0d63e6f4a9da\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.411938 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2vs7\" (UniqueName: \"kubernetes.io/projected/14ac7adf-c7e3-4512-9c65-6361d005b4b7-kube-api-access-n2vs7\") pod 
\"14ac7adf-c7e3-4512-9c65-6361d005b4b7\" (UID: \"14ac7adf-c7e3-4512-9c65-6361d005b4b7\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412261 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412273 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6k5mm\" (UniqueName: \"kubernetes.io/projected/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-kube-api-access-6k5mm\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412282 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412291 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szdql\" (UniqueName: \"kubernetes.io/projected/3315d9fd-71da-4f22-98d8-7142da896aab-kube-api-access-szdql\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412298 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1822cd0b-b52d-49d7-b787-a1091edfc585-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412307 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzrjx\" (UniqueName: \"kubernetes.io/projected/1822cd0b-b52d-49d7-b787-a1091edfc585-kube-api-access-wzrjx\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412314 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412339 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7jbx\" (UniqueName: \"kubernetes.io/projected/c4c68094-a8b8-4327-9ae1-335226d3b938-kube-api-access-f7jbx\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412347 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412356 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wltsh\" (UniqueName: \"kubernetes.io/projected/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-kube-api-access-wltsh\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412365 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4c68094-a8b8-4327-9ae1-335226d3b938-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412374 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412381 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3315d9fd-71da-4f22-98d8-7142da896aab-logs\") on node \"crc\" 
DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412376 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a376fae9-3d2f-4247-b917-0d63e6f4a9da-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a376fae9-3d2f-4247-b917-0d63e6f4a9da" (UID: "a376fae9-3d2f-4247-b917-0d63e6f4a9da"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.412991 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a376fae9-3d2f-4247-b917-0d63e6f4a9da-logs" (OuterVolumeSpecName: "logs") pod "a376fae9-3d2f-4247-b917-0d63e6f4a9da" (UID: "a376fae9-3d2f-4247-b917-0d63e6f4a9da"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.429553 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.429598 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.441459 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88971618-54e2-4670-be08-a6ae63ed99df-kube-api-access-f26k6" (OuterVolumeSpecName: "kube-api-access-f26k6") pod "88971618-54e2-4670-be08-a6ae63ed99df" (UID: "88971618-54e2-4670-be08-a6ae63ed99df"). InnerVolumeSpecName "kube-api-access-f26k6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.447357 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c05c5cf3-bcb4-4307-a601-fbecde4f026b-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "c05c5cf3-bcb4-4307-a601-fbecde4f026b" (UID: "c05c5cf3-bcb4-4307-a601-fbecde4f026b"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.455703 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14ac7adf-c7e3-4512-9c65-6361d005b4b7-kube-api-access-n2vs7" (OuterVolumeSpecName: "kube-api-access-n2vs7") pod "14ac7adf-c7e3-4512-9c65-6361d005b4b7" (UID: "14ac7adf-c7e3-4512-9c65-6361d005b4b7"). InnerVolumeSpecName "kube-api-access-n2vs7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.456434 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-config-data" (OuterVolumeSpecName: "config-data") pod "7a758cc8-4546-4982-b2a7-b7824ecfc118" (UID: "7a758cc8-4546-4982-b2a7-b7824ecfc118"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.458947 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a376fae9-3d2f-4247-b917-0d63e6f4a9da-kube-api-access-lmxtj" (OuterVolumeSpecName: "kube-api-access-lmxtj") pod "a376fae9-3d2f-4247-b917-0d63e6f4a9da" (UID: "a376fae9-3d2f-4247-b917-0d63e6f4a9da"). InnerVolumeSpecName "kube-api-access-lmxtj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.466118 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "a376fae9-3d2f-4247-b917-0d63e6f4a9da" (UID: "a376fae9-3d2f-4247-b917-0d63e6f4a9da"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.500675 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-scripts" (OuterVolumeSpecName: "scripts") pod "a376fae9-3d2f-4247-b917-0d63e6f4a9da" (UID: "a376fae9-3d2f-4247-b917-0d63e6f4a9da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.513923 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.514440 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f26k6\" (UniqueName: \"kubernetes.io/projected/88971618-54e2-4670-be08-a6ae63ed99df-kube-api-access-f26k6\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.514451 4941 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c05c5cf3-bcb4-4307-a601-fbecde4f026b-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.514460 4941 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a376fae9-3d2f-4247-b917-0d63e6f4a9da-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.514468 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.514477 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a376fae9-3d2f-4247-b917-0d63e6f4a9da-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.514486 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2vs7\" (UniqueName: \"kubernetes.io/projected/14ac7adf-c7e3-4512-9c65-6361d005b4b7-kube-api-access-n2vs7\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.514495 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmxtj\" (UniqueName: \"kubernetes.io/projected/a376fae9-3d2f-4247-b917-0d63e6f4a9da-kube-api-access-lmxtj\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.514513 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Nov 30 07:09:04 crc kubenswrapper[4941]: E1130 07:09:04.513960 4941 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 30 07:09:04 crc kubenswrapper[4941]: E1130 07:09:04.514767 4941 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts podName:4c62ddf4-ab03-4aa9-968b-ed0a8898d367 nodeName:}" failed. No retries permitted until 2025-11-30 07:09:05.514744145 +0000 UTC m=+1366.282915784 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts") pod "novaapi2ee8-account-delete-jr5gz" (UID: "4c62ddf4-ab03-4aa9-968b-ed0a8898d367") : configmap "openstack-scripts" not found Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.531109 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "a65953b8-4285-412b-9670-7747951a62ae" (UID: "a65953b8-4285-412b-9670-7747951a62ae"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.533600 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" (UID: "4571a7ec-45e1-4c4b-a96a-b9841b3d89bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.570445 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a65953b8-4285-412b-9670-7747951a62ae" (UID: "a65953b8-4285-412b-9670-7747951a62ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.574541 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a758cc8-4546-4982-b2a7-b7824ecfc118" (UID: "7a758cc8-4546-4982-b2a7-b7824ecfc118"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.613269 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.195:3000/\": dial tcp 10.217.0.195:3000: connect: connection refused" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.616074 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.616096 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.616105 4941 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.616116 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.622390 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3315d9fd-71da-4f22-98d8-7142da896aab" (UID: "3315d9fd-71da-4f22-98d8-7142da896aab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.630599 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "3315d9fd-71da-4f22-98d8-7142da896aab" (UID: "3315d9fd-71da-4f22-98d8-7142da896aab"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.633668 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7a758cc8-4546-4982-b2a7-b7824ecfc118" (UID: "7a758cc8-4546-4982-b2a7-b7824ecfc118"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.667171 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1822cd0b-b52d-49d7-b787-a1091edfc585" (UID: "1822cd0b-b52d-49d7-b787-a1091edfc585"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.681254 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" (UID: "4571a7ec-45e1-4c4b-a96a-b9841b3d89bc"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.693518 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-config-data" (OuterVolumeSpecName: "config-data") pod "3315d9fd-71da-4f22-98d8-7142da896aab" (UID: "3315d9fd-71da-4f22-98d8-7142da896aab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.705570 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a376fae9-3d2f-4247-b917-0d63e6f4a9da" (UID: "a376fae9-3d2f-4247-b917-0d63e6f4a9da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.718465 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.718507 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.718517 4941 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.718529 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.718539 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3315d9fd-71da-4f22-98d8-7142da896aab-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.718548 4941 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.718558 4941 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.729090 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-config-data" (OuterVolumeSpecName: "config-data") pod 
"4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" (UID: "4571a7ec-45e1-4c4b-a96a-b9841b3d89bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.731352 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14ac7adf-c7e3-4512-9c65-6361d005b4b7-config-data" (OuterVolumeSpecName: "config-data") pod "14ac7adf-c7e3-4512-9c65-6361d005b4b7" (UID: "14ac7adf-c7e3-4512-9c65-6361d005b4b7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.751133 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf0e4aae-888b-4df8-a6e2-19a5f04b9656" (UID: "cf0e4aae-888b-4df8-a6e2-19a5f04b9656"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.752791 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.760544 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88971618-54e2-4670-be08-a6ae63ed99df-config-data" (OuterVolumeSpecName: "config-data") pod "88971618-54e2-4670-be08-a6ae63ed99df" (UID: "88971618-54e2-4670-be08-a6ae63ed99df"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.762396 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14ac7adf-c7e3-4512-9c65-6361d005b4b7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14ac7adf-c7e3-4512-9c65-6361d005b4b7" (UID: "14ac7adf-c7e3-4512-9c65-6361d005b4b7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.777044 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-config-data" (OuterVolumeSpecName: "config-data") pod "cf0e4aae-888b-4df8-a6e2-19a5f04b9656" (UID: "cf0e4aae-888b-4df8-a6e2-19a5f04b9656"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.783105 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88971618-54e2-4670-be08-a6ae63ed99df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88971618-54e2-4670-be08-a6ae63ed99df" (UID: "88971618-54e2-4670-be08-a6ae63ed99df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.788820 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" (UID: "4571a7ec-45e1-4c4b-a96a-b9841b3d89bc"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.796429 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cf0e4aae-888b-4df8-a6e2-19a5f04b9656" (UID: "cf0e4aae-888b-4df8-a6e2-19a5f04b9656"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.800655 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "a65953b8-4285-412b-9670-7747951a62ae" (UID: "a65953b8-4285-412b-9670-7747951a62ae"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.807711 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-config-data" (OuterVolumeSpecName: "config-data") pod "a376fae9-3d2f-4247-b917-0d63e6f4a9da" (UID: "a376fae9-3d2f-4247-b917-0d63e6f4a9da"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.813561 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1822cd0b-b52d-49d7-b787-a1091edfc585" (UID: "1822cd0b-b52d-49d7-b787-a1091edfc585"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.815001 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7a758cc8-4546-4982-b2a7-b7824ecfc118" (UID: "7a758cc8-4546-4982-b2a7-b7824ecfc118"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820687 4941 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820720 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820732 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820744 4941 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820757 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14ac7adf-c7e3-4512-9c65-6361d005b4b7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820769 4941 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820780 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820791 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820802 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88971618-54e2-4670-be08-a6ae63ed99df-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820815 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88971618-54e2-4670-be08-a6ae63ed99df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820826 4941 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a758cc8-4546-4982-b2a7-b7824ecfc118-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820837 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14ac7adf-c7e3-4512-9c65-6361d005b4b7-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.820848 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 
07:09:04.820860 4941 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a65953b8-4285-412b-9670-7747951a62ae-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.824823 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1822cd0b-b52d-49d7-b787-a1091edfc585" (UID: "1822cd0b-b52d-49d7-b787-a1091edfc585"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.836419 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a376fae9-3d2f-4247-b917-0d63e6f4a9da" (UID: "a376fae9-3d2f-4247-b917-0d63e6f4a9da"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.845855 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cf0e4aae-888b-4df8-a6e2-19a5f04b9656" (UID: "cf0e4aae-888b-4df8-a6e2-19a5f04b9656"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.856552 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.862537 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-config-data" (OuterVolumeSpecName: "config-data") pod "1822cd0b-b52d-49d7-b787-a1091edfc585" (UID: "1822cd0b-b52d-49d7-b787-a1091edfc585"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: E1130 07:09:04.902512 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Nov 30 07:09:04 crc kubenswrapper[4941]: E1130 07:09:04.908370 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Nov 30 07:09:04 crc kubenswrapper[4941]: E1130 07:09:04.909875 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Nov 30 07:09:04 crc kubenswrapper[4941]: E1130 07:09:04.909913 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="79ca3b29-7cdd-4923-a12c-2f350d4b8728" containerName="galera" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.931047 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-logs\") pod \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.931191 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-httpd-run\") pod \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.931249 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmkzj\" (UniqueName: \"kubernetes.io/projected/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-kube-api-access-vmkzj\") pod \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.931320 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-internal-tls-certs\") pod \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.931387 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.931438 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-scripts\") pod \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.931480 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-config-data\") pod \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.931562 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-combined-ca-bundle\") pod \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\" (UID: \"a573f7e0-ee6d-4847-a778-5f6ef41fd17f\") " Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.931636 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a573f7e0-ee6d-4847-a778-5f6ef41fd17f" (UID: "a573f7e0-ee6d-4847-a778-5f6ef41fd17f"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.932027 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-logs" (OuterVolumeSpecName: "logs") pod "a573f7e0-ee6d-4847-a778-5f6ef41fd17f" (UID: "a573f7e0-ee6d-4847-a778-5f6ef41fd17f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.932084 4941 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.932102 4941 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf0e4aae-888b-4df8-a6e2-19a5f04b9656-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.932116 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.932131 4941 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1822cd0b-b52d-49d7-b787-a1091edfc585-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.932143 4941 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a376fae9-3d2f-4247-b917-0d63e6f4a9da-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:04 crc kubenswrapper[4941]: E1130 07:09:04.934571 4941 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 30 07:09:04 crc kubenswrapper[4941]: E1130 07:09:04.934651 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data podName:45978317-0f07-44da-8b74-fbaaec0e6105 nodeName:}" failed. 
No retries permitted until 2025-11-30 07:09:12.934626456 +0000 UTC m=+1373.702798285 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data") pod "rabbitmq-cell1-server-0" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105") : configmap "rabbitmq-cell1-config-data" not found Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.943710 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"14ac7adf-c7e3-4512-9c65-6361d005b4b7","Type":"ContainerDied","Data":"98b31671fa8fd66e9e61127127016a8c4b60b1daedaf1033ccb1f0f97ad880fd"} Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.943765 4941 scope.go:117] "RemoveContainer" containerID="370789e569c2ad6cc7a3fb517040dd5ea003437e5aef35484b9575972bc086d9" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.943879 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.944272 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-scripts" (OuterVolumeSpecName: "scripts") pod "a573f7e0-ee6d-4847-a778-5f6ef41fd17f" (UID: "a573f7e0-ee6d-4847-a778-5f6ef41fd17f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.950420 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a573f7e0-ee6d-4847-a778-5f6ef41fd17f","Type":"ContainerDied","Data":"30d8dd9623e9768efc57f7f70f7831339e77e587c388cf1163641171e80b166d"} Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.950444 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.953004 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-kube-api-access-vmkzj" (OuterVolumeSpecName: "kube-api-access-vmkzj") pod "a573f7e0-ee6d-4847-a778-5f6ef41fd17f" (UID: "a573f7e0-ee6d-4847-a778-5f6ef41fd17f"). InnerVolumeSpecName "kube-api-access-vmkzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.960495 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "a573f7e0-ee6d-4847-a778-5f6ef41fd17f" (UID: "a573f7e0-ee6d-4847-a778-5f6ef41fd17f"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.965739 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"88971618-54e2-4670-be08-a6ae63ed99df","Type":"ContainerDied","Data":"a03be0166325f65a71d81d071981e4f4c659c9f712b8c18b492acf4ba96108ec"} Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.965820 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.970379 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutrona2cc-account-delete-262pl" event={"ID":"bad7c3f7-8cec-4baf-808a-43184771d1da","Type":"ContainerStarted","Data":"cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e"} Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.970435 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutrona2cc-account-delete-262pl" podUID="bad7c3f7-8cec-4baf-808a-43184771d1da" containerName="mariadb-account-delete" containerID="cri-o://cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e" gracePeriod=30 Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.979106 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell00ef1-account-delete-jbdnp" event={"ID":"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7","Type":"ContainerStarted","Data":"f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082"} Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.979223 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novacell00ef1-account-delete-jbdnp" podUID="e5342339-2b4f-4cf7-b262-8e9ff9d41bb7" containerName="mariadb-account-delete" containerID="cri-o://f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082" gracePeriod=30 Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.984545 4941 scope.go:117] "RemoveContainer" containerID="6a679ded02687521bbab8ba0d141ec44027e549ed85d08f01b4106e0cd6db8b8" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.987917 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a376fae9-3d2f-4247-b917-0d63e6f4a9da","Type":"ContainerDied","Data":"f3c5cd2caf7497c4b65c0a14258d0f670900d8a52cd79944430769533545db82"} Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.988128 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 07:09:04 crc kubenswrapper[4941]: I1130 07:09:04.991456 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutrona2cc-account-delete-262pl" podStartSLOduration=7.991442805 podStartE2EDuration="7.991442805s" podCreationTimestamp="2025-11-30 07:08:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:09:04.987850116 +0000 UTC m=+1365.756021725" watchObservedRunningTime="2025-11-30 07:09:04.991442805 +0000 UTC m=+1365.759614414" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:04.995262 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6794dcdcd8-t9v24" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:04.995307 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7c9965466b-7rmfq" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:04.995356 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder43da-account-delete-8nqxg" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:04.995390 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:04.995413 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican75ae-account-delete-7krwx" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:04.995445 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:04.995575 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novaapi2ee8-account-delete-jr5gz" podUID="4c62ddf4-ab03-4aa9-968b-ed0a8898d367" containerName="mariadb-account-delete" containerID="cri-o://a8397a6565b65f4dcbf9cc9eeeb676b2bc40c04a5d9bbc034d755ede612546af" gracePeriod=30 Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:04.995805 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:04.995838 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:04.995864 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.036015 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a573f7e0-ee6d-4847-a778-5f6ef41fd17f" (UID: "a573f7e0-ee6d-4847-a778-5f6ef41fd17f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.054940 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.054976 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmkzj\" (UniqueName: \"kubernetes.io/projected/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-kube-api-access-vmkzj\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.055016 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.055030 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.055043 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.055733 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.066022 4941 scope.go:117] "RemoveContainer" containerID="9d4cad614888876b728ec4d308e74ba608a61af7f849e758a89e8b1fc6414299" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.088578 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell0-conductor-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.094240 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.102533 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a573f7e0-ee6d-4847-a778-5f6ef41fd17f" (UID: "a573f7e0-ee6d-4847-a778-5f6ef41fd17f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.106283 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.107465 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-config-data" (OuterVolumeSpecName: "config-data") pod "a573f7e0-ee6d-4847-a778-5f6ef41fd17f" (UID: "a573f7e0-ee6d-4847-a778-5f6ef41fd17f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.127820 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.133149 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell00ef1-account-delete-jbdnp" podStartSLOduration=8.133111445 podStartE2EDuration="8.133111445s" podCreationTimestamp="2025-11-30 07:08:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:09:05.102798046 +0000 UTC m=+1365.870969655" watchObservedRunningTime="2025-11-30 07:09:05.133111445 +0000 UTC m=+1365.901283054" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.153878 4941 scope.go:117] "RemoveContainer" containerID="ef324eb6f3967c89fcde7a1338bf072fab857f22a3e354ed3b1cd701d98d5c93" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.154594 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.156336 4941 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.156355 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.156365 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a573f7e0-ee6d-4847-a778-5f6ef41fd17f-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.171138 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.182907 4941 scope.go:117] "RemoveContainer" containerID="dfb235dd19464a6b133288b3327f7e345c63de23737ba8b6a478ad9216727e2b" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.192795 
4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.256822 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.262070 4941 scope.go:117] "RemoveContainer" containerID="68e80ed464e919522f447820599362734c316c8d7a5459adff802d5f94a52e5f" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.283654 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.294726 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.331371 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-7c9965466b-7rmfq"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.356117 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-7c9965466b-7rmfq"] Nov 30 07:09:05 crc kubenswrapper[4941]: E1130 07:09:05.359272 4941 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 30 07:09:05 crc kubenswrapper[4941]: E1130 07:09:05.359359 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data podName:4e7a5ee5-1f0c-4819-a375-891a5e2cea03 nodeName:}" failed. No retries permitted until 2025-11-30 07:09:13.359343604 +0000 UTC m=+1374.127515213 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data") pod "rabbitmq-server-0" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03") : configmap "rabbitmq-config-data" not found Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.367219 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6794dcdcd8-t9v24"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.373510 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6794dcdcd8-t9v24"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.378206 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican75ae-account-delete-7krwx"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.384298 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican75ae-account-delete-7krwx"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.390736 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.400494 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.406903 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.412128 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.419774 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.422072 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.435806 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/glance-default-internal-api-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.444950 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.471944 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.534438 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14ac7adf-c7e3-4512-9c65-6361d005b4b7" path="/var/lib/kubelet/pods/14ac7adf-c7e3-4512-9c65-6361d005b4b7/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.534963 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1822cd0b-b52d-49d7-b787-a1091edfc585" path="/var/lib/kubelet/pods/1822cd0b-b52d-49d7-b787-a1091edfc585/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.535652 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bb6c559-5c94-43b0-b6f0-3992652c720f" path="/var/lib/kubelet/pods/1bb6c559-5c94-43b0-b6f0-3992652c720f/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.536737 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" path="/var/lib/kubelet/pods/3315d9fd-71da-4f22-98d8-7142da896aab/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.538047 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" path="/var/lib/kubelet/pods/4571a7ec-45e1-4c4b-a96a-b9841b3d89bc/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.538702 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" path="/var/lib/kubelet/pods/74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.539449 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a758cc8-4546-4982-b2a7-b7824ecfc118" path="/var/lib/kubelet/pods/7a758cc8-4546-4982-b2a7-b7824ecfc118/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.541854 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce" path="/var/lib/kubelet/pods/7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.542716 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88971618-54e2-4670-be08-a6ae63ed99df" path="/var/lib/kubelet/pods/88971618-54e2-4670-be08-a6ae63ed99df/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.543291 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a376fae9-3d2f-4247-b917-0d63e6f4a9da" path="/var/lib/kubelet/pods/a376fae9-3d2f-4247-b917-0d63e6f4a9da/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.546043 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a573f7e0-ee6d-4847-a778-5f6ef41fd17f" path="/var/lib/kubelet/pods/a573f7e0-ee6d-4847-a778-5f6ef41fd17f/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.546610 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a65953b8-4285-412b-9670-7747951a62ae" path="/var/lib/kubelet/pods/a65953b8-4285-412b-9670-7747951a62ae/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.547544 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c05c5cf3-bcb4-4307-a601-fbecde4f026b" path="/var/lib/kubelet/pods/c05c5cf3-bcb4-4307-a601-fbecde4f026b/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.548043 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" path="/var/lib/kubelet/pods/cf0e4aae-888b-4df8-a6e2-19a5f04b9656/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.548663 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f311bacd-2cef-44fe-95c4-38a7462cd4a6" path="/var/lib/kubelet/pods/f311bacd-2cef-44fe-95c4-38a7462cd4a6/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.550029 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa90de41-9166-475c-925a-3d79b02a694d" path="/var/lib/kubelet/pods/fa90de41-9166-475c-925a-3d79b02a694d/volumes" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.562648 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79ca3b29-7cdd-4923-a12c-2f350d4b8728-config-data-generated\") pod \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.562718 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79ca3b29-7cdd-4923-a12c-2f350d4b8728-combined-ca-bundle\") pod \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.562745 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/79ca3b29-7cdd-4923-a12c-2f350d4b8728-galera-tls-certs\") pod \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.562791 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.563025 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-operator-scripts\") pod \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.563064 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5hsc\" (UniqueName: \"kubernetes.io/projected/79ca3b29-7cdd-4923-a12c-2f350d4b8728-kube-api-access-s5hsc\") pod \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.563087 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-kolla-config\") pod \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.563122 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-config-data-default\") pod \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\" (UID: \"79ca3b29-7cdd-4923-a12c-2f350d4b8728\") " Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.563126 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79ca3b29-7cdd-4923-a12c-2f350d4b8728-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "79ca3b29-7cdd-4923-a12c-2f350d4b8728" (UID: "79ca3b29-7cdd-4923-a12c-2f350d4b8728"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.563469 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79ca3b29-7cdd-4923-a12c-2f350d4b8728-config-data-generated\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: E1130 07:09:05.563530 4941 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 30 07:09:05 crc kubenswrapper[4941]: E1130 07:09:05.563575 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts podName:4c62ddf4-ab03-4aa9-968b-ed0a8898d367 nodeName:}" failed. No retries permitted until 2025-11-30 07:09:07.563561148 +0000 UTC m=+1368.331732757 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts") pod "novaapi2ee8-account-delete-jr5gz" (UID: "4c62ddf4-ab03-4aa9-968b-ed0a8898d367") : configmap "openstack-scripts" not found Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.563804 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "79ca3b29-7cdd-4923-a12c-2f350d4b8728" (UID: "79ca3b29-7cdd-4923-a12c-2f350d4b8728"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.565681 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "79ca3b29-7cdd-4923-a12c-2f350d4b8728" (UID: "79ca3b29-7cdd-4923-a12c-2f350d4b8728"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.566123 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "79ca3b29-7cdd-4923-a12c-2f350d4b8728" (UID: "79ca3b29-7cdd-4923-a12c-2f350d4b8728"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.569444 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79ca3b29-7cdd-4923-a12c-2f350d4b8728-kube-api-access-s5hsc" (OuterVolumeSpecName: "kube-api-access-s5hsc") pod "79ca3b29-7cdd-4923-a12c-2f350d4b8728" (UID: "79ca3b29-7cdd-4923-a12c-2f350d4b8728"). InnerVolumeSpecName "kube-api-access-s5hsc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.574611 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "mysql-db") pod "79ca3b29-7cdd-4923-a12c-2f350d4b8728" (UID: "79ca3b29-7cdd-4923-a12c-2f350d4b8728"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.623619 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79ca3b29-7cdd-4923-a12c-2f350d4b8728-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79ca3b29-7cdd-4923-a12c-2f350d4b8728" (UID: "79ca3b29-7cdd-4923-a12c-2f350d4b8728"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.650477 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79ca3b29-7cdd-4923-a12c-2f350d4b8728-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "79ca3b29-7cdd-4923-a12c-2f350d4b8728" (UID: "79ca3b29-7cdd-4923-a12c-2f350d4b8728"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.672775 4941 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/79ca3b29-7cdd-4923-a12c-2f350d4b8728-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.672815 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.672932 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.672942 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5hsc\" (UniqueName: \"kubernetes.io/projected/79ca3b29-7cdd-4923-a12c-2f350d4b8728-kube-api-access-s5hsc\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.672952 4941 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.672960 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79ca3b29-7cdd-4923-a12c-2f350d4b8728-config-data-default\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.672968 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79ca3b29-7cdd-4923-a12c-2f350d4b8728-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.704970 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.774829 4941 
reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:05 crc kubenswrapper[4941]: I1130 07:09:05.976218 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.014012 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.026215 4941 generic.go:334] "Generic (PLEG): container finished" podID="4e7a5ee5-1f0c-4819-a375-891a5e2cea03" containerID="9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f" exitCode=0 Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.026280 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4e7a5ee5-1f0c-4819-a375-891a5e2cea03","Type":"ContainerDied","Data":"9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f"} Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.026301 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4e7a5ee5-1f0c-4819-a375-891a5e2cea03","Type":"ContainerDied","Data":"9fdd8964b5683e2ba4930996421236d204bc14b89d58477a6e06575f555eeda0"} Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.026335 4941 scope.go:117] "RemoveContainer" containerID="9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.026432 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.032000 4941 generic.go:334] "Generic (PLEG): container finished" podID="79ca3b29-7cdd-4923-a12c-2f350d4b8728" containerID="b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da" exitCode=0 Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.032077 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.032096 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"79ca3b29-7cdd-4923-a12c-2f350d4b8728","Type":"ContainerDied","Data":"b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da"} Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.032126 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"79ca3b29-7cdd-4923-a12c-2f350d4b8728","Type":"ContainerDied","Data":"cd51c8555a2f56784fbfd3441828d11ee64a0a6d043a5f7298976d7819d57012"} Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.044646 4941 generic.go:334] "Generic (PLEG): container finished" podID="45978317-0f07-44da-8b74-fbaaec0e6105" containerID="0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08" exitCode=0 Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.044690 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"45978317-0f07-44da-8b74-fbaaec0e6105","Type":"ContainerDied","Data":"0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08"} Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.044718 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"45978317-0f07-44da-8b74-fbaaec0e6105","Type":"ContainerDied","Data":"2c33db5f06c6093a3b0ef71abd136c7c7d058f6546b2a67cf83e619aa974ef2b"} Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.044779 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.078161 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-erlang-cookie\") pod \"45978317-0f07-44da-8b74-fbaaec0e6105\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.078215 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-tls\") pod \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.078252 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-pod-info\") pod \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.078283 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-plugins-conf\") pod \"45978317-0f07-44da-8b74-fbaaec0e6105\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.078312 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-erlang-cookie\") pod \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 
07:09:06.078965 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/45978317-0f07-44da-8b74-fbaaec0e6105-erlang-cookie-secret\") pod \"45978317-0f07-44da-8b74-fbaaec0e6105\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.078994 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-server-conf\") pod \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079012 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/45978317-0f07-44da-8b74-fbaaec0e6105-pod-info\") pod \"45978317-0f07-44da-8b74-fbaaec0e6105\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079039 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-erlang-cookie-secret\") pod \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079070 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data\") pod \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079097 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-confd\") pod \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079114 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-plugins\") pod \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079138 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjvtf\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-kube-api-access-gjvtf\") pod \"45978317-0f07-44da-8b74-fbaaec0e6105\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079168 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data\") pod \"45978317-0f07-44da-8b74-fbaaec0e6105\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079183 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-plugins-conf\") pod \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 
07:09:06.079208 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"45978317-0f07-44da-8b74-fbaaec0e6105\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079240 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-plugins\") pod \"45978317-0f07-44da-8b74-fbaaec0e6105\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079256 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079271 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-confd\") pod \"45978317-0f07-44da-8b74-fbaaec0e6105\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079318 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-server-conf\") pod \"45978317-0f07-44da-8b74-fbaaec0e6105\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079476 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-tls\") pod \"45978317-0f07-44da-8b74-fbaaec0e6105\" (UID: \"45978317-0f07-44da-8b74-fbaaec0e6105\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.079503 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvjw9\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-kube-api-access-dvjw9\") pod \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\" (UID: \"4e7a5ee5-1f0c-4819-a375-891a5e2cea03\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.082435 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "4e7a5ee5-1f0c-4819-a375-891a5e2cea03" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.082629 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45978317-0f07-44da-8b74-fbaaec0e6105-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "45978317-0f07-44da-8b74-fbaaec0e6105" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.082936 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "45978317-0f07-44da-8b74-fbaaec0e6105" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.083549 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "45978317-0f07-44da-8b74-fbaaec0e6105" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.085387 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "4e7a5ee5-1f0c-4819-a375-891a5e2cea03" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.085640 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "4e7a5ee5-1f0c-4819-a375-891a5e2cea03" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.090707 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-pod-info" (OuterVolumeSpecName: "pod-info") pod "4e7a5ee5-1f0c-4819-a375-891a5e2cea03" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.091137 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "45978317-0f07-44da-8b74-fbaaec0e6105" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.091293 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-kube-api-access-dvjw9" (OuterVolumeSpecName: "kube-api-access-dvjw9") pod "4e7a5ee5-1f0c-4819-a375-891a5e2cea03" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03"). InnerVolumeSpecName "kube-api-access-dvjw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.091499 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "4e7a5ee5-1f0c-4819-a375-891a5e2cea03" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03"). 
InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.094840 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/45978317-0f07-44da-8b74-fbaaec0e6105-pod-info" (OuterVolumeSpecName: "pod-info") pod "45978317-0f07-44da-8b74-fbaaec0e6105" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.094850 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "45978317-0f07-44da-8b74-fbaaec0e6105" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.096149 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-kube-api-access-gjvtf" (OuterVolumeSpecName: "kube-api-access-gjvtf") pod "45978317-0f07-44da-8b74-fbaaec0e6105" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105"). InnerVolumeSpecName "kube-api-access-gjvtf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.103734 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "45978317-0f07-44da-8b74-fbaaec0e6105" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.104028 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "4e7a5ee5-1f0c-4819-a375-891a5e2cea03" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.111984 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.114893 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "4e7a5ee5-1f0c-4819-a375-891a5e2cea03" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.119938 4941 scope.go:117] "RemoveContainer" containerID="0e7be72894db142820110f45d623c5d0708a4455ef259b4e3e8138e248399c22" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.127390 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.136552 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data" (OuterVolumeSpecName: "config-data") pod "4e7a5ee5-1f0c-4819-a375-891a5e2cea03" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.136712 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data" (OuterVolumeSpecName: "config-data") pod "45978317-0f07-44da-8b74-fbaaec0e6105" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.183775 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjvtf\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-kube-api-access-gjvtf\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184161 4941 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184174 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184232 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184245 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184261 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184295 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184307 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvjw9\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-kube-api-access-dvjw9\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184319 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184359 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184371 4941 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-pod-info\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184382 4941 
reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184396 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184406 4941 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/45978317-0f07-44da-8b74-fbaaec0e6105-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184442 4941 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/45978317-0f07-44da-8b74-fbaaec0e6105-pod-info\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184453 4941 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184467 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.184477 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.203783 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-server-conf" (OuterVolumeSpecName: "server-conf") pod "4e7a5ee5-1f0c-4819-a375-891a5e2cea03" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.206811 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-server-conf" (OuterVolumeSpecName: "server-conf") pod "45978317-0f07-44da-8b74-fbaaec0e6105" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.207814 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.210124 4941 scope.go:117] "RemoveContainer" containerID="9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.210788 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Nov 30 07:09:06 crc kubenswrapper[4941]: E1130 07:09:06.213080 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f\": container with ID starting with 9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f not found: ID does not exist" containerID="9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.213121 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f"} err="failed to get container status \"9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f\": rpc error: code = NotFound desc = could not find container \"9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f\": container with ID starting with 9336c46a7f93eff2c1c95ef2787a163a0ef1a6338ce4814e663bf7ccf6e6ba3f not found: ID does not exist" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.213147 4941 scope.go:117] "RemoveContainer" containerID="0e7be72894db142820110f45d623c5d0708a4455ef259b4e3e8138e248399c22" Nov 30 07:09:06 crc kubenswrapper[4941]: E1130 07:09:06.213560 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e7be72894db142820110f45d623c5d0708a4455ef259b4e3e8138e248399c22\": container with ID starting with 0e7be72894db142820110f45d623c5d0708a4455ef259b4e3e8138e248399c22 not found: ID does not exist" containerID="0e7be72894db142820110f45d623c5d0708a4455ef259b4e3e8138e248399c22" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.213589 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e7be72894db142820110f45d623c5d0708a4455ef259b4e3e8138e248399c22"} err="failed to get container status \"0e7be72894db142820110f45d623c5d0708a4455ef259b4e3e8138e248399c22\": rpc error: code = NotFound desc = could not find container \"0e7be72894db142820110f45d623c5d0708a4455ef259b4e3e8138e248399c22\": container with ID starting with 0e7be72894db142820110f45d623c5d0708a4455ef259b4e3e8138e248399c22 not found: ID does not exist" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.213610 4941 scope.go:117] "RemoveContainer" containerID="b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.232428 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "4e7a5ee5-1f0c-4819-a375-891a5e2cea03" (UID: "4e7a5ee5-1f0c-4819-a375-891a5e2cea03"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.244913 4941 scope.go:117] "RemoveContainer" containerID="6e22be391a8f9b4894810930342f6002715e23985ca3694f812967e9ee9cccf7" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.270101 4941 scope.go:117] "RemoveContainer" containerID="b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da" Nov 30 07:09:06 crc kubenswrapper[4941]: E1130 07:09:06.270608 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da\": container with ID starting with b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da not found: ID does not exist" containerID="b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.270643 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da"} err="failed to get container status \"b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da\": rpc error: code = NotFound desc = could not find container \"b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da\": container with ID starting with b3bc75086483c5ebea88076ca96e0c713caeb617f04a043775abb9ee191e48da not found: ID does not exist" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.270662 4941 scope.go:117] "RemoveContainer" containerID="6e22be391a8f9b4894810930342f6002715e23985ca3694f812967e9ee9cccf7" Nov 30 07:09:06 crc kubenswrapper[4941]: E1130 07:09:06.270922 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e22be391a8f9b4894810930342f6002715e23985ca3694f812967e9ee9cccf7\": container with ID starting with 6e22be391a8f9b4894810930342f6002715e23985ca3694f812967e9ee9cccf7 not found: ID does not exist" containerID="6e22be391a8f9b4894810930342f6002715e23985ca3694f812967e9ee9cccf7" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.270939 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e22be391a8f9b4894810930342f6002715e23985ca3694f812967e9ee9cccf7"} err="failed to get container status \"6e22be391a8f9b4894810930342f6002715e23985ca3694f812967e9ee9cccf7\": rpc error: code = NotFound desc = could not find container \"6e22be391a8f9b4894810930342f6002715e23985ca3694f812967e9ee9cccf7\": container with ID starting with 6e22be391a8f9b4894810930342f6002715e23985ca3694f812967e9ee9cccf7 not found: ID does not exist" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.270953 4941 scope.go:117] "RemoveContainer" containerID="0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.277299 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "45978317-0f07-44da-8b74-fbaaec0e6105" (UID: "45978317-0f07-44da-8b74-fbaaec0e6105"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.286434 4941 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-server-conf\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.286470 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4e7a5ee5-1f0c-4819-a375-891a5e2cea03-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.286481 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.286490 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.286500 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/45978317-0f07-44da-8b74-fbaaec0e6105-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.286508 4941 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/45978317-0f07-44da-8b74-fbaaec0e6105-server-conf\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.301074 4941 scope.go:117] "RemoveContainer" containerID="4167d8a3413d2475510af7736137df718f411d79744780a27c3acc5216161479" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.338015 4941 scope.go:117] "RemoveContainer" containerID="0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08" Nov 30 07:09:06 crc kubenswrapper[4941]: E1130 07:09:06.338616 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08\": container with ID starting with 0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08 not found: ID does not exist" containerID="0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.338655 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08"} err="failed to get container status \"0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08\": rpc error: code = NotFound desc = could not find container \"0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08\": container with ID starting with 0de5f610502d6638b2a749a98b93acbc488f48b51a775e06b06ca4c1012e1a08 not found: ID does not exist" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.338679 4941 scope.go:117] "RemoveContainer" containerID="4167d8a3413d2475510af7736137df718f411d79744780a27c3acc5216161479" Nov 30 07:09:06 crc kubenswrapper[4941]: E1130 07:09:06.339585 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4167d8a3413d2475510af7736137df718f411d79744780a27c3acc5216161479\": container with ID starting with 4167d8a3413d2475510af7736137df718f411d79744780a27c3acc5216161479 
not found: ID does not exist" containerID="4167d8a3413d2475510af7736137df718f411d79744780a27c3acc5216161479" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.339610 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4167d8a3413d2475510af7736137df718f411d79744780a27c3acc5216161479"} err="failed to get container status \"4167d8a3413d2475510af7736137df718f411d79744780a27c3acc5216161479\": rpc error: code = NotFound desc = could not find container \"4167d8a3413d2475510af7736137df718f411d79744780a27c3acc5216161479\": container with ID starting with 4167d8a3413d2475510af7736137df718f411d79744780a27c3acc5216161479 not found: ID does not exist" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.403995 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.429657 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.594834 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.608364 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.642738 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.697567 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-fernet-keys\") pod \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.698204 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-scripts\") pod \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.698243 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-credential-keys\") pod \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.698278 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-config-data\") pod \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.698316 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nz7mp\" (UniqueName: \"kubernetes.io/projected/25deaa20-8f61-4317-ad4a-11df9ddff2fe-kube-api-access-nz7mp\") pod \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.698459 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-internal-tls-certs\") pod 
\"25deaa20-8f61-4317-ad4a-11df9ddff2fe\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.698520 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-public-tls-certs\") pod \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.698542 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-combined-ca-bundle\") pod \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\" (UID: \"25deaa20-8f61-4317-ad4a-11df9ddff2fe\") " Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.716484 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-scripts" (OuterVolumeSpecName: "scripts") pod "25deaa20-8f61-4317-ad4a-11df9ddff2fe" (UID: "25deaa20-8f61-4317-ad4a-11df9ddff2fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.717874 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "25deaa20-8f61-4317-ad4a-11df9ddff2fe" (UID: "25deaa20-8f61-4317-ad4a-11df9ddff2fe"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.719592 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "25deaa20-8f61-4317-ad4a-11df9ddff2fe" (UID: "25deaa20-8f61-4317-ad4a-11df9ddff2fe"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.741300 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25deaa20-8f61-4317-ad4a-11df9ddff2fe-kube-api-access-nz7mp" (OuterVolumeSpecName: "kube-api-access-nz7mp") pod "25deaa20-8f61-4317-ad4a-11df9ddff2fe" (UID: "25deaa20-8f61-4317-ad4a-11df9ddff2fe"). InnerVolumeSpecName "kube-api-access-nz7mp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.747981 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25deaa20-8f61-4317-ad4a-11df9ddff2fe" (UID: "25deaa20-8f61-4317-ad4a-11df9ddff2fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.754591 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-config-data" (OuterVolumeSpecName: "config-data") pod "25deaa20-8f61-4317-ad4a-11df9ddff2fe" (UID: "25deaa20-8f61-4317-ad4a-11df9ddff2fe"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.773421 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "25deaa20-8f61-4317-ad4a-11df9ddff2fe" (UID: "25deaa20-8f61-4317-ad4a-11df9ddff2fe"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.806375 4941 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.806419 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.806427 4941 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.806435 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.806443 4941 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.806452 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.806461 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nz7mp\" (UniqueName: \"kubernetes.io/projected/25deaa20-8f61-4317-ad4a-11df9ddff2fe-kube-api-access-nz7mp\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.899597 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "25deaa20-8f61-4317-ad4a-11df9ddff2fe" (UID: "25deaa20-8f61-4317-ad4a-11df9ddff2fe"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.908275 4941 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25deaa20-8f61-4317-ad4a-11df9ddff2fe-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:06 crc kubenswrapper[4941]: I1130 07:09:06.969437 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-29mwc"] Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.012724 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.013258 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.013690 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.013720 4941 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.014085 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-29mwc"] Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.022753 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_322fb449-5599-45af-97e2-158692366d9b/ovn-northd/0.log" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.022823 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.037009 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb74a593_764a_416b_897b_539bafb29c70.slice/crio-conmon-012634e55142fed4221ecd68adf1f8141133456f2d0eb8c2e3aca13cafe2681c.scope\": RecentStats: unable to find data in memory cache]" Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.038667 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.038791 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-43da-account-create-update-9jzkv"] Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.054815 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.054980 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-zcbz9" podUID="e01076ff-d267-4931-8788-47eee9ebfd76" containerName="ovn-controller" probeResult="failure" output=< Nov 30 07:09:07 crc kubenswrapper[4941]: ERROR - Failed to get connection status from ovn-controller, ovn-appctl exit status: 0 Nov 30 07:09:07 crc kubenswrapper[4941]: > Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.055229 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder43da-account-delete-8nqxg"] Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.062845 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-43da-account-create-update-9jzkv"] Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.079494 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.079549 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovs-vswitchd" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.079898 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder43da-account-delete-8nqxg"] Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.114866 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/322fb449-5599-45af-97e2-158692366d9b-ovn-rundir\") pod \"322fb449-5599-45af-97e2-158692366d9b\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " Nov 30 07:09:07 crc 
kubenswrapper[4941]: I1130 07:09:07.114942 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/322fb449-5599-45af-97e2-158692366d9b-scripts\") pod \"322fb449-5599-45af-97e2-158692366d9b\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.114971 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-metrics-certs-tls-certs\") pod \"322fb449-5599-45af-97e2-158692366d9b\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.115010 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-combined-ca-bundle\") pod \"322fb449-5599-45af-97e2-158692366d9b\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.115065 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322fb449-5599-45af-97e2-158692366d9b-config\") pod \"322fb449-5599-45af-97e2-158692366d9b\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.115081 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bv8z8\" (UniqueName: \"kubernetes.io/projected/322fb449-5599-45af-97e2-158692366d9b-kube-api-access-bv8z8\") pod \"322fb449-5599-45af-97e2-158692366d9b\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.115133 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-ovn-northd-tls-certs\") pod \"322fb449-5599-45af-97e2-158692366d9b\" (UID: \"322fb449-5599-45af-97e2-158692366d9b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.115853 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/322fb449-5599-45af-97e2-158692366d9b-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "322fb449-5599-45af-97e2-158692366d9b" (UID: "322fb449-5599-45af-97e2-158692366d9b"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.116175 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/322fb449-5599-45af-97e2-158692366d9b-scripts" (OuterVolumeSpecName: "scripts") pod "322fb449-5599-45af-97e2-158692366d9b" (UID: "322fb449-5599-45af-97e2-158692366d9b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.116450 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/322fb449-5599-45af-97e2-158692366d9b-config" (OuterVolumeSpecName: "config") pod "322fb449-5599-45af-97e2-158692366d9b" (UID: "322fb449-5599-45af-97e2-158692366d9b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.124810 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_322fb449-5599-45af-97e2-158692366d9b/ovn-northd/0.log" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.124855 4941 generic.go:334] "Generic (PLEG): container finished" podID="322fb449-5599-45af-97e2-158692366d9b" containerID="eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274" exitCode=139 Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.124910 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"322fb449-5599-45af-97e2-158692366d9b","Type":"ContainerDied","Data":"eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274"} Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.124939 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"322fb449-5599-45af-97e2-158692366d9b","Type":"ContainerDied","Data":"a27920a1da68143556f6bb75d8290f2b7b6ff656559929b9f3e93d0181c13e92"} Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.124958 4941 scope.go:117] "RemoveContainer" containerID="580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.125233 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.133521 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/322fb449-5599-45af-97e2-158692366d9b-kube-api-access-bv8z8" (OuterVolumeSpecName: "kube-api-access-bv8z8") pod "322fb449-5599-45af-97e2-158692366d9b" (UID: "322fb449-5599-45af-97e2-158692366d9b"). InnerVolumeSpecName "kube-api-access-bv8z8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.138936 4941 generic.go:334] "Generic (PLEG): container finished" podID="25deaa20-8f61-4317-ad4a-11df9ddff2fe" containerID="b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4" exitCode=0 Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.139028 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-79cf87bd4d-c9dvr" event={"ID":"25deaa20-8f61-4317-ad4a-11df9ddff2fe","Type":"ContainerDied","Data":"b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4"} Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.139217 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-79cf87bd4d-c9dvr" event={"ID":"25deaa20-8f61-4317-ad4a-11df9ddff2fe","Type":"ContainerDied","Data":"08438617d7f95c8ba2fa018c7919936730de77fe645ce598be142c0420069cdc"} Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.139186 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-79cf87bd4d-c9dvr" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.149140 4941 generic.go:334] "Generic (PLEG): container finished" podID="fb74a593-764a-416b-897b-539bafb29c70" containerID="012634e55142fed4221ecd68adf1f8141133456f2d0eb8c2e3aca13cafe2681c" exitCode=0 Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.149178 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-74d6754465-nglc5" event={"ID":"fb74a593-764a-416b-897b-539bafb29c70","Type":"ContainerDied","Data":"012634e55142fed4221ecd68adf1f8141133456f2d0eb8c2e3aca13cafe2681c"} Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.153260 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "322fb449-5599-45af-97e2-158692366d9b" (UID: "322fb449-5599-45af-97e2-158692366d9b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.207262 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-79cf87bd4d-c9dvr"] Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.229112 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/322fb449-5599-45af-97e2-158692366d9b-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.229137 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.229148 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/322fb449-5599-45af-97e2-158692366d9b-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.229159 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bv8z8\" (UniqueName: \"kubernetes.io/projected/322fb449-5599-45af-97e2-158692366d9b-kube-api-access-bv8z8\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.229168 4941 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/322fb449-5599-45af-97e2-158692366d9b-ovn-rundir\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.232400 4941 scope.go:117] "RemoveContainer" containerID="eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.238273 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "322fb449-5599-45af-97e2-158692366d9b" (UID: "322fb449-5599-45af-97e2-158692366d9b"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.245096 4941 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Nov 30 07:09:07 crc kubenswrapper[4941]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-11-30T07:08:59Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Nov 30 07:09:07 crc kubenswrapper[4941]: /etc/init.d/functions: line 589: 386 Alarm clock "$@" Nov 30 07:09:07 crc kubenswrapper[4941]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-zcbz9" message=< Nov 30 07:09:07 crc kubenswrapper[4941]: Exiting ovn-controller (1) [FAILED] Nov 30 07:09:07 crc kubenswrapper[4941]: Killing ovn-controller (1) [ OK ] Nov 30 07:09:07 crc kubenswrapper[4941]: Killing ovn-controller (1) with SIGKILL [ OK ] Nov 30 07:09:07 crc kubenswrapper[4941]: 2025-11-30T07:08:59Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Nov 30 07:09:07 crc kubenswrapper[4941]: /etc/init.d/functions: line 589: 386 Alarm clock "$@" Nov 30 07:09:07 crc kubenswrapper[4941]: > Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.245131 4941 kuberuntime_container.go:691] "PreStop hook failed" err=< Nov 30 07:09:07 crc kubenswrapper[4941]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-11-30T07:08:59Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Nov 30 07:09:07 crc kubenswrapper[4941]: /etc/init.d/functions: line 589: 386 Alarm clock "$@" Nov 30 07:09:07 crc kubenswrapper[4941]: > pod="openstack/ovn-controller-zcbz9" podUID="e01076ff-d267-4931-8788-47eee9ebfd76" containerName="ovn-controller" containerID="cri-o://fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.245162 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-zcbz9" podUID="e01076ff-d267-4931-8788-47eee9ebfd76" containerName="ovn-controller" containerID="cri-o://fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1" gracePeriod=21 Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.248323 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "322fb449-5599-45af-97e2-158692366d9b" (UID: "322fb449-5599-45af-97e2-158692366d9b"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.257666 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-79cf87bd4d-c9dvr"] Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.331362 4941 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.331393 4941 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/322fb449-5599-45af-97e2-158692366d9b-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.395825 4941 scope.go:117] "RemoveContainer" containerID="580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283" Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.396203 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283\": container with ID starting with 580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283 not found: ID does not exist" containerID="580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.396233 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283"} err="failed to get container status \"580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283\": rpc error: code = NotFound desc = could not find container \"580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283\": container with ID starting with 580aa2e2061ce2cac4745623c3005969bdcc10ed2bf699cc29410a7c22740283 not found: ID does not exist" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.396253 4941 scope.go:117] "RemoveContainer" containerID="eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274" Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.396722 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274\": container with ID starting with eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274 not found: ID does not exist" containerID="eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.396766 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274"} err="failed to get container status \"eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274\": rpc error: code = NotFound desc = could not find container \"eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274\": container with ID starting with eebdda228dfb1ad19e1bb03a3c0a96d3dd37e007d9a9054f0e89ec6559ff9274 not found: ID does not exist" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.396793 4941 scope.go:117] "RemoveContainer" containerID="b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.480600 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-74d6754465-nglc5" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.505872 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.515737 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.524569 4941 scope.go:117] "RemoveContainer" containerID="b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4" Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.525249 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4\": container with ID starting with b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4 not found: ID does not exist" containerID="b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.525276 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4"} err="failed to get container status \"b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4\": rpc error: code = NotFound desc = could not find container \"b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4\": container with ID starting with b1acce04363677ce194be5301ca53f3f6d69bdfb0fbe0b7f194fb14c32074ee4 not found: ID does not exist" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.531194 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0851a1f1-5a9f-4609-b638-33da1fca0f01" path="/var/lib/kubelet/pods/0851a1f1-5a9f-4609-b638-33da1fca0f01/volumes" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.531705 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f4d49ca-a8ee-4cd9-acd3-349c3c616627" path="/var/lib/kubelet/pods/1f4d49ca-a8ee-4cd9-acd3-349c3c616627/volumes" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.532291 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25deaa20-8f61-4317-ad4a-11df9ddff2fe" path="/var/lib/kubelet/pods/25deaa20-8f61-4317-ad4a-11df9ddff2fe/volumes" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.533533 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="322fb449-5599-45af-97e2-158692366d9b" path="/var/lib/kubelet/pods/322fb449-5599-45af-97e2-158692366d9b/volumes" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.534484 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45978317-0f07-44da-8b74-fbaaec0e6105" path="/var/lib/kubelet/pods/45978317-0f07-44da-8b74-fbaaec0e6105/volumes" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.535402 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e7a5ee5-1f0c-4819-a375-891a5e2cea03" path="/var/lib/kubelet/pods/4e7a5ee5-1f0c-4819-a375-891a5e2cea03/volumes" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.536559 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79ca3b29-7cdd-4923-a12c-2f350d4b8728" path="/var/lib/kubelet/pods/79ca3b29-7cdd-4923-a12c-2f350d4b8728/volumes" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.537111 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4c68094-a8b8-4327-9ae1-335226d3b938" 
path="/var/lib/kubelet/pods/c4c68094-a8b8-4327-9ae1-335226d3b938/volumes" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.641994 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb74a593-764a-416b-897b-539bafb29c70-logs\") pod \"fb74a593-764a-416b-897b-539bafb29c70\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.642128 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-config-data\") pod \"fb74a593-764a-416b-897b-539bafb29c70\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.642184 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-config-data-custom\") pod \"fb74a593-764a-416b-897b-539bafb29c70\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.642225 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-combined-ca-bundle\") pod \"fb74a593-764a-416b-897b-539bafb29c70\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.642312 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dn6v2\" (UniqueName: \"kubernetes.io/projected/fb74a593-764a-416b-897b-539bafb29c70-kube-api-access-dn6v2\") pod \"fb74a593-764a-416b-897b-539bafb29c70\" (UID: \"fb74a593-764a-416b-897b-539bafb29c70\") " Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.642638 4941 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 30 07:09:07 crc kubenswrapper[4941]: E1130 07:09:07.642688 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts podName:4c62ddf4-ab03-4aa9-968b-ed0a8898d367 nodeName:}" failed. No retries permitted until 2025-11-30 07:09:11.642674857 +0000 UTC m=+1372.410846466 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts") pod "novaapi2ee8-account-delete-jr5gz" (UID: "4c62ddf4-ab03-4aa9-968b-ed0a8898d367") : configmap "openstack-scripts" not found Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.643446 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb74a593-764a-416b-897b-539bafb29c70-logs" (OuterVolumeSpecName: "logs") pod "fb74a593-764a-416b-897b-539bafb29c70" (UID: "fb74a593-764a-416b-897b-539bafb29c70"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.649912 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fb74a593-764a-416b-897b-539bafb29c70" (UID: "fb74a593-764a-416b-897b-539bafb29c70"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.650792 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb74a593-764a-416b-897b-539bafb29c70-kube-api-access-dn6v2" (OuterVolumeSpecName: "kube-api-access-dn6v2") pod "fb74a593-764a-416b-897b-539bafb29c70" (UID: "fb74a593-764a-416b-897b-539bafb29c70"). InnerVolumeSpecName "kube-api-access-dn6v2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.688270 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb74a593-764a-416b-897b-539bafb29c70" (UID: "fb74a593-764a-416b-897b-539bafb29c70"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.703548 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-config-data" (OuterVolumeSpecName: "config-data") pod "fb74a593-764a-416b-897b-539bafb29c70" (UID: "fb74a593-764a-416b-897b-539bafb29c70"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.743827 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dn6v2\" (UniqueName: \"kubernetes.io/projected/fb74a593-764a-416b-897b-539bafb29c70-kube-api-access-dn6v2\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.743858 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb74a593-764a-416b-897b-539bafb29c70-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.743869 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.743882 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.743893 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb74a593-764a-416b-897b-539bafb29c70-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.753675 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-zcbz9_e01076ff-d267-4931-8788-47eee9ebfd76/ovn-controller/0.log" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.753733 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zcbz9" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.761447 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.763570 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.844937 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-combined-ca-bundle\") pod \"126a91a7-8a81-40ef-87db-383ed37a26f4\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.844983 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vthpx\" (UniqueName: \"kubernetes.io/projected/126a91a7-8a81-40ef-87db-383ed37a26f4-kube-api-access-vthpx\") pod \"126a91a7-8a81-40ef-87db-383ed37a26f4\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845015 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-scripts\") pod \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845034 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/126a91a7-8a81-40ef-87db-383ed37a26f4-logs\") pod \"126a91a7-8a81-40ef-87db-383ed37a26f4\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845063 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqfqm\" (UniqueName: \"kubernetes.io/projected/e01076ff-d267-4931-8788-47eee9ebfd76-kube-api-access-cqfqm\") pod \"e01076ff-d267-4931-8788-47eee9ebfd76\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845084 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01076ff-d267-4931-8788-47eee9ebfd76-combined-ca-bundle\") pod \"e01076ff-d267-4931-8788-47eee9ebfd76\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845108 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-combined-ca-bundle\") pod \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845142 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-run-httpd\") pod \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845165 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-config-data\") pod \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845182 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-log-httpd\") pod \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\" 
(UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845199 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e01076ff-d267-4931-8788-47eee9ebfd76-ovn-controller-tls-certs\") pod \"e01076ff-d267-4931-8788-47eee9ebfd76\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845226 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-ceilometer-tls-certs\") pod \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845246 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-config-data-custom\") pod \"126a91a7-8a81-40ef-87db-383ed37a26f4\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845604 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e01076ff-d267-4931-8788-47eee9ebfd76-scripts\") pod \"e01076ff-d267-4931-8788-47eee9ebfd76\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845623 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvshn\" (UniqueName: \"kubernetes.io/projected/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-kube-api-access-vvshn\") pod \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845656 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-run-ovn\") pod \"e01076ff-d267-4931-8788-47eee9ebfd76\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845696 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-config-data\") pod \"126a91a7-8a81-40ef-87db-383ed37a26f4\" (UID: \"126a91a7-8a81-40ef-87db-383ed37a26f4\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845722 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-log-ovn\") pod \"e01076ff-d267-4931-8788-47eee9ebfd76\" (UID: \"e01076ff-d267-4931-8788-47eee9ebfd76\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845785 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-sg-core-conf-yaml\") pod \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\" (UID: \"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845819 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-run\") pod \"e01076ff-d267-4931-8788-47eee9ebfd76\" (UID: 
\"e01076ff-d267-4931-8788-47eee9ebfd76\") " Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.845934 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" (UID: "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.846161 4941 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.846189 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-run" (OuterVolumeSpecName: "var-run") pod "e01076ff-d267-4931-8788-47eee9ebfd76" (UID: "e01076ff-d267-4931-8788-47eee9ebfd76"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.846460 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "e01076ff-d267-4931-8788-47eee9ebfd76" (UID: "e01076ff-d267-4931-8788-47eee9ebfd76"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.847567 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e01076ff-d267-4931-8788-47eee9ebfd76-scripts" (OuterVolumeSpecName: "scripts") pod "e01076ff-d267-4931-8788-47eee9ebfd76" (UID: "e01076ff-d267-4931-8788-47eee9ebfd76"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.847727 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" (UID: "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.848927 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "e01076ff-d267-4931-8788-47eee9ebfd76" (UID: "e01076ff-d267-4931-8788-47eee9ebfd76"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.849257 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/126a91a7-8a81-40ef-87db-383ed37a26f4-logs" (OuterVolumeSpecName: "logs") pod "126a91a7-8a81-40ef-87db-383ed37a26f4" (UID: "126a91a7-8a81-40ef-87db-383ed37a26f4"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.853387 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/126a91a7-8a81-40ef-87db-383ed37a26f4-kube-api-access-vthpx" (OuterVolumeSpecName: "kube-api-access-vthpx") pod "126a91a7-8a81-40ef-87db-383ed37a26f4" (UID: "126a91a7-8a81-40ef-87db-383ed37a26f4"). InnerVolumeSpecName "kube-api-access-vthpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.853479 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "126a91a7-8a81-40ef-87db-383ed37a26f4" (UID: "126a91a7-8a81-40ef-87db-383ed37a26f4"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.856542 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-kube-api-access-vvshn" (OuterVolumeSpecName: "kube-api-access-vvshn") pod "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" (UID: "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b"). InnerVolumeSpecName "kube-api-access-vvshn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.870727 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e01076ff-d267-4931-8788-47eee9ebfd76-kube-api-access-cqfqm" (OuterVolumeSpecName: "kube-api-access-cqfqm") pod "e01076ff-d267-4931-8788-47eee9ebfd76" (UID: "e01076ff-d267-4931-8788-47eee9ebfd76"). InnerVolumeSpecName "kube-api-access-cqfqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.879475 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-scripts" (OuterVolumeSpecName: "scripts") pod "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" (UID: "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.884546 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" (UID: "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.886232 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e01076ff-d267-4931-8788-47eee9ebfd76-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e01076ff-d267-4931-8788-47eee9ebfd76" (UID: "e01076ff-d267-4931-8788-47eee9ebfd76"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.888543 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "126a91a7-8a81-40ef-87db-383ed37a26f4" (UID: "126a91a7-8a81-40ef-87db-383ed37a26f4"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.893593 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" (UID: "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.895184 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-config-data" (OuterVolumeSpecName: "config-data") pod "126a91a7-8a81-40ef-87db-383ed37a26f4" (UID: "126a91a7-8a81-40ef-87db-383ed37a26f4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.908011 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e01076ff-d267-4931-8788-47eee9ebfd76-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "e01076ff-d267-4931-8788-47eee9ebfd76" (UID: "e01076ff-d267-4931-8788-47eee9ebfd76"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.919536 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" (UID: "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.932320 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-config-data" (OuterVolumeSpecName: "config-data") pod "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" (UID: "22508ba9-7cfd-462a-9b94-3ee1d8c0a15b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.947926 4941 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-run\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.947958 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.947972 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vthpx\" (UniqueName: \"kubernetes.io/projected/126a91a7-8a81-40ef-87db-383ed37a26f4-kube-api-access-vthpx\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.947985 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.947998 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/126a91a7-8a81-40ef-87db-383ed37a26f4-logs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948009 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqfqm\" (UniqueName: \"kubernetes.io/projected/e01076ff-d267-4931-8788-47eee9ebfd76-kube-api-access-cqfqm\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948020 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01076ff-d267-4931-8788-47eee9ebfd76-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948032 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948044 4941 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948056 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948069 4941 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/e01076ff-d267-4931-8788-47eee9ebfd76-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948081 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948095 4941 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 
07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948107 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e01076ff-d267-4931-8788-47eee9ebfd76-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948119 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvshn\" (UniqueName: \"kubernetes.io/projected/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-kube-api-access-vvshn\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948129 4941 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948144 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/126a91a7-8a81-40ef-87db-383ed37a26f4-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948159 4941 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e01076ff-d267-4931-8788-47eee9ebfd76-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:07 crc kubenswrapper[4941]: I1130 07:09:07.948175 4941 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.174749 4941 generic.go:334] "Generic (PLEG): container finished" podID="126a91a7-8a81-40ef-87db-383ed37a26f4" containerID="83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af" exitCode=0 Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.174828 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" event={"ID":"126a91a7-8a81-40ef-87db-383ed37a26f4","Type":"ContainerDied","Data":"83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af"} Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.174852 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" event={"ID":"126a91a7-8a81-40ef-87db-383ed37a26f4","Type":"ContainerDied","Data":"f20855645344da1913360ef50f8d99f2d5c39b4ee43b823bc1f5141acf25fbe5"} Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.174868 4941 scope.go:117] "RemoveContainer" containerID="83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.174959 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7987c5dbd6-p8ncc" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.184343 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-zcbz9_e01076ff-d267-4931-8788-47eee9ebfd76/ovn-controller/0.log" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.184528 4941 generic.go:334] "Generic (PLEG): container finished" podID="e01076ff-d267-4931-8788-47eee9ebfd76" containerID="fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1" exitCode=137 Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.184690 4941 util.go:48] "No ready sandbox for pod can be found. 
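
---- annotation: container exit codes ----
The PLEG "container finished" entries in this section carry three different exit codes: 0 (clean exit, e.g. keystone and the barbican containers), 139 for ovn-northd, and 137 for ovn-controller just above. Codes above 128 follow the shell convention 128+N for death by signal N: 139 = 128+11 (SIGSEGV) and 137 = 128+9 (SIGKILL, the post-grace-period kill that the failed PreStop hook earlier led to); the "signal 14 (Alarm clock)" in the hook output is SIGALRM. A small self-contained decoder:

    package main

    import "fmt"

    func describeExit(code int) string {
        switch {
        case code == 0:
            return "exited cleanly"
        case code > 128:
            names := map[int]string{9: "SIGKILL", 11: "SIGSEGV", 14: "SIGALRM", 15: "SIGTERM"}
            sig := code - 128
            if n, ok := names[sig]; ok {
                return fmt.Sprintf("killed by signal %d (%s)", sig, n)
            }
            return fmt.Sprintf("killed by signal %d", sig)
        default:
            return fmt.Sprintf("exited with error code %d", code)
        }
    }

    func main() {
        for _, c := range []int{0, 137, 139} {
            fmt.Println(c, "=>", describeExit(c))
        }
    }
---- end annotation ----
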
Need to start a new one" pod="openstack/ovn-controller-zcbz9" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.184716 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zcbz9" event={"ID":"e01076ff-d267-4931-8788-47eee9ebfd76","Type":"ContainerDied","Data":"fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1"} Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.184766 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zcbz9" event={"ID":"e01076ff-d267-4931-8788-47eee9ebfd76","Type":"ContainerDied","Data":"138e8808dd91953fcac23eb199fdb811bb1b86dd2260525c2884d5339786f8ab"} Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.196214 4941 generic.go:334] "Generic (PLEG): container finished" podID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerID="b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b" exitCode=0 Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.199352 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.199422 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b","Type":"ContainerDied","Data":"b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b"} Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.200764 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22508ba9-7cfd-462a-9b94-3ee1d8c0a15b","Type":"ContainerDied","Data":"d4a8fe8ec71334257724b982b05a4d03e0a109e845c1904eef0241236daa8001"} Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.208568 4941 scope.go:117] "RemoveContainer" containerID="ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.216112 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-74d6754465-nglc5" event={"ID":"fb74a593-764a-416b-897b-539bafb29c70","Type":"ContainerDied","Data":"1a4fa8f94dcc08ed93adee28c101d6d0115b4bf5bfaa85632ebf33655179ec1d"} Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.216214 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-74d6754465-nglc5" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.233657 4941 scope.go:117] "RemoveContainer" containerID="83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af" Nov 30 07:09:08 crc kubenswrapper[4941]: E1130 07:09:08.234224 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af\": container with ID starting with 83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af not found: ID does not exist" containerID="83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.234307 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af"} err="failed to get container status \"83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af\": rpc error: code = NotFound desc = could not find container \"83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af\": container with ID starting with 83a4e2e633dcee6761feb9d18664c0d36371a4047449cd2a19d8e1b2f20398af not found: ID does not exist" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.234452 4941 scope.go:117] "RemoveContainer" containerID="ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686" Nov 30 07:09:08 crc kubenswrapper[4941]: E1130 07:09:08.234821 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686\": container with ID starting with ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686 not found: ID does not exist" containerID="ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.234889 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686"} err="failed to get container status \"ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686\": rpc error: code = NotFound desc = could not find container \"ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686\": container with ID starting with ab411c7b32f40b67ea2c0e06955df6eaf7b7914062d6a226507fc399b37fd686 not found: ID does not exist" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.234904 4941 scope.go:117] "RemoveContainer" containerID="fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.238443 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-zcbz9"] Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.246587 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-zcbz9"] Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.283079 4941 scope.go:117] "RemoveContainer" containerID="fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.283135 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:09:08 crc kubenswrapper[4941]: E1130 07:09:08.283518 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1\": container with ID starting with fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1 not found: ID does not exist" containerID="fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.283555 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1"} err="failed to get container status \"fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1\": rpc error: code = NotFound desc = could not find container \"fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1\": container with ID starting with fc6c5369120c722f4e6ffb14db33eab89e594f65ddab1f8845a7d5308109e0f1 not found: ID does not exist" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.283579 4941 scope.go:117] "RemoveContainer" containerID="c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.298996 4941 scope.go:117] "RemoveContainer" containerID="89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.299010 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.306544 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-7987c5dbd6-p8ncc"] Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.310314 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-7987c5dbd6-p8ncc"] Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.315056 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-74d6754465-nglc5"] Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.320696 4941 scope.go:117] "RemoveContainer" containerID="b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.322092 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-74d6754465-nglc5"] Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.343154 4941 scope.go:117] "RemoveContainer" containerID="258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.370512 4941 scope.go:117] "RemoveContainer" containerID="c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625" Nov 30 07:09:08 crc kubenswrapper[4941]: E1130 07:09:08.370862 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625\": container with ID starting with c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625 not found: ID does not exist" containerID="c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.370894 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625"} err="failed to get container status \"c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625\": rpc error: code = NotFound desc = could not find container \"c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625\": container with ID 
starting with c174cc40e9f5f0e335589224ff66f32b8accbaf4b27b2b7d462a5b3a05c8c625 not found: ID does not exist" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.370914 4941 scope.go:117] "RemoveContainer" containerID="89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4" Nov 30 07:09:08 crc kubenswrapper[4941]: E1130 07:09:08.371213 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4\": container with ID starting with 89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4 not found: ID does not exist" containerID="89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.371235 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4"} err="failed to get container status \"89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4\": rpc error: code = NotFound desc = could not find container \"89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4\": container with ID starting with 89534db53dd5162e885cc70da42f84244da60b3122c4a9554f7b0a3bb0c46cd4 not found: ID does not exist" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.371249 4941 scope.go:117] "RemoveContainer" containerID="b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b" Nov 30 07:09:08 crc kubenswrapper[4941]: E1130 07:09:08.371580 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b\": container with ID starting with b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b not found: ID does not exist" containerID="b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.371601 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b"} err="failed to get container status \"b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b\": rpc error: code = NotFound desc = could not find container \"b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b\": container with ID starting with b5c83af196623a929e8e99ba0a5874c7c7e3551db33b95bb2770811f605ba18b not found: ID does not exist" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.371614 4941 scope.go:117] "RemoveContainer" containerID="258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d" Nov 30 07:09:08 crc kubenswrapper[4941]: E1130 07:09:08.371841 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d\": container with ID starting with 258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d not found: ID does not exist" containerID="258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.371879 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d"} err="failed to get container status 
\"258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d\": rpc error: code = NotFound desc = could not find container \"258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d\": container with ID starting with 258697bd13d27b1cd20f257ea9600031b89f7a4b29dd4caf8e6daad793e14c2d not found: ID does not exist" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.371891 4941 scope.go:117] "RemoveContainer" containerID="012634e55142fed4221ecd68adf1f8141133456f2d0eb8c2e3aca13cafe2681c" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.434578 4941 scope.go:117] "RemoveContainer" containerID="73cb23f3d6e038846d4f5b8f18403308641c3a8d002d0c1fd63ffe41c380b3ef" Nov 30 07:09:08 crc kubenswrapper[4941]: I1130 07:09:08.788589 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-5678756fc7-642xv" podUID="9dfbf8e6-60f7-47a0-9fee-3d532daf0503" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.148:9696/\": dial tcp 10.217.0.148:9696: connect: connection refused" Nov 30 07:09:09 crc kubenswrapper[4941]: I1130 07:09:09.532871 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="126a91a7-8a81-40ef-87db-383ed37a26f4" path="/var/lib/kubelet/pods/126a91a7-8a81-40ef-87db-383ed37a26f4/volumes" Nov 30 07:09:09 crc kubenswrapper[4941]: I1130 07:09:09.533884 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" path="/var/lib/kubelet/pods/22508ba9-7cfd-462a-9b94-3ee1d8c0a15b/volumes" Nov 30 07:09:09 crc kubenswrapper[4941]: I1130 07:09:09.535576 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e01076ff-d267-4931-8788-47eee9ebfd76" path="/var/lib/kubelet/pods/e01076ff-d267-4931-8788-47eee9ebfd76/volumes" Nov 30 07:09:09 crc kubenswrapper[4941]: I1130 07:09:09.536379 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb74a593-764a-416b-897b-539bafb29c70" path="/var/lib/kubelet/pods/fb74a593-764a-416b-897b-539bafb29c70/volumes" Nov 30 07:09:11 crc kubenswrapper[4941]: E1130 07:09:11.717768 4941 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 30 07:09:11 crc kubenswrapper[4941]: E1130 07:09:11.718999 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts podName:4c62ddf4-ab03-4aa9-968b-ed0a8898d367 nodeName:}" failed. No retries permitted until 2025-11-30 07:09:19.718977327 +0000 UTC m=+1380.487148936 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts") pod "novaapi2ee8-account-delete-jr5gz" (UID: "4c62ddf4-ab03-4aa9-968b-ed0a8898d367") : configmap "openstack-scripts" not found Nov 30 07:09:12 crc kubenswrapper[4941]: E1130 07:09:12.009009 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:12 crc kubenswrapper[4941]: E1130 07:09:12.009991 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:12 crc kubenswrapper[4941]: E1130 07:09:12.010274 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:12 crc kubenswrapper[4941]: E1130 07:09:12.011957 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:12 crc kubenswrapper[4941]: E1130 07:09:12.012269 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:12 crc kubenswrapper[4941]: E1130 07:09:12.012514 4941 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server" Nov 30 07:09:12 crc kubenswrapper[4941]: E1130 07:09:12.014477 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:12 crc kubenswrapper[4941]: E1130 07:09:12.014715 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: 
container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovs-vswitchd" Nov 30 07:09:17 crc kubenswrapper[4941]: E1130 07:09:17.010111 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:17 crc kubenswrapper[4941]: E1130 07:09:17.011133 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:17 crc kubenswrapper[4941]: E1130 07:09:17.011545 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:17 crc kubenswrapper[4941]: E1130 07:09:17.011664 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:17 crc kubenswrapper[4941]: E1130 07:09:17.011723 4941 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server" Nov 30 07:09:17 crc kubenswrapper[4941]: E1130 07:09:17.013388 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:17 crc kubenswrapper[4941]: E1130 07:09:17.016584 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:17 crc kubenswrapper[4941]: E1130 07:09:17.016637 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" 
pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovs-vswitchd" Nov 30 07:09:19 crc kubenswrapper[4941]: E1130 07:09:19.746961 4941 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 30 07:09:19 crc kubenswrapper[4941]: E1130 07:09:19.747089 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts podName:4c62ddf4-ab03-4aa9-968b-ed0a8898d367 nodeName:}" failed. No retries permitted until 2025-11-30 07:09:35.747070442 +0000 UTC m=+1396.515242051 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts") pod "novaapi2ee8-account-delete-jr5gz" (UID: "4c62ddf4-ab03-4aa9-968b-ed0a8898d367") : configmap "openstack-scripts" not found Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.243091 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.351344 4941 generic.go:334] "Generic (PLEG): container finished" podID="9dfbf8e6-60f7-47a0-9fee-3d532daf0503" containerID="2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc" exitCode=0 Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.351383 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5678756fc7-642xv" event={"ID":"9dfbf8e6-60f7-47a0-9fee-3d532daf0503","Type":"ContainerDied","Data":"2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc"} Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.351406 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5678756fc7-642xv" event={"ID":"9dfbf8e6-60f7-47a0-9fee-3d532daf0503","Type":"ContainerDied","Data":"334e9f4946147d9c94c287f3fb1a4305db1153961bed62868fbe365ca4ca7591"} Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.351423 4941 scope.go:117] "RemoveContainer" containerID="539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.351425 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5678756fc7-642xv" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.370858 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-httpd-config\") pod \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.370944 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-ovndb-tls-certs\") pod \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.371038 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-public-tls-certs\") pod \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.371097 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g87xh\" (UniqueName: \"kubernetes.io/projected/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-kube-api-access-g87xh\") pod \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.371150 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-config\") pod \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.371250 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-internal-tls-certs\") pod \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.371407 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-combined-ca-bundle\") pod \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\" (UID: \"9dfbf8e6-60f7-47a0-9fee-3d532daf0503\") " Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.380472 4941 scope.go:117] "RemoveContainer" containerID="2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.383871 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "9dfbf8e6-60f7-47a0-9fee-3d532daf0503" (UID: "9dfbf8e6-60f7-47a0-9fee-3d532daf0503"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.389451 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-kube-api-access-g87xh" (OuterVolumeSpecName: "kube-api-access-g87xh") pod "9dfbf8e6-60f7-47a0-9fee-3d532daf0503" (UID: "9dfbf8e6-60f7-47a0-9fee-3d532daf0503"). 
InnerVolumeSpecName "kube-api-access-g87xh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.410092 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9dfbf8e6-60f7-47a0-9fee-3d532daf0503" (UID: "9dfbf8e6-60f7-47a0-9fee-3d532daf0503"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.410531 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9dfbf8e6-60f7-47a0-9fee-3d532daf0503" (UID: "9dfbf8e6-60f7-47a0-9fee-3d532daf0503"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.412690 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9dfbf8e6-60f7-47a0-9fee-3d532daf0503" (UID: "9dfbf8e6-60f7-47a0-9fee-3d532daf0503"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.428064 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "9dfbf8e6-60f7-47a0-9fee-3d532daf0503" (UID: "9dfbf8e6-60f7-47a0-9fee-3d532daf0503"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.428435 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-config" (OuterVolumeSpecName: "config") pod "9dfbf8e6-60f7-47a0-9fee-3d532daf0503" (UID: "9dfbf8e6-60f7-47a0-9fee-3d532daf0503"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.473305 4941 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-httpd-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.473366 4941 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.473378 4941 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.473387 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g87xh\" (UniqueName: \"kubernetes.io/projected/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-kube-api-access-g87xh\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.473397 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-config\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.473424 4941 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.473436 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dfbf8e6-60f7-47a0-9fee-3d532daf0503-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.497058 4941 scope.go:117] "RemoveContainer" containerID="539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575" Nov 30 07:09:21 crc kubenswrapper[4941]: E1130 07:09:21.497485 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575\": container with ID starting with 539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575 not found: ID does not exist" containerID="539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.497539 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575"} err="failed to get container status \"539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575\": rpc error: code = NotFound desc = could not find container \"539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575\": container with ID starting with 539de922803c649d8b631e4f8fc264d321df7e8c147822dea35918eefa0da575 not found: ID does not exist" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.497567 4941 scope.go:117] "RemoveContainer" containerID="2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc" Nov 30 07:09:21 crc kubenswrapper[4941]: E1130 07:09:21.497896 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc\": container with ID starting with 2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc not found: ID does not exist" containerID="2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.497927 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc"} err="failed to get container status \"2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc\": rpc error: code = NotFound desc = could not find container \"2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc\": container with ID starting with 2b04c7c8a66b8b0335c5f4e36405532a43ed5dce27ddddf2e401ac61ace874fc not found: ID does not exist" Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.682990 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5678756fc7-642xv"] Nov 30 07:09:21 crc kubenswrapper[4941]: I1130 07:09:21.692928 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5678756fc7-642xv"] Nov 30 07:09:22 crc kubenswrapper[4941]: E1130 07:09:22.008113 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:22 crc kubenswrapper[4941]: E1130 07:09:22.008499 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:22 crc kubenswrapper[4941]: E1130 07:09:22.008966 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:22 crc kubenswrapper[4941]: E1130 07:09:22.009037 4941 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server" Nov 30 07:09:22 crc kubenswrapper[4941]: E1130 07:09:22.009687 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:22 crc kubenswrapper[4941]: E1130 07:09:22.011567 4941 log.go:32] "ExecSync 
cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:22 crc kubenswrapper[4941]: E1130 07:09:22.013076 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:22 crc kubenswrapper[4941]: E1130 07:09:22.013117 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovs-vswitchd" Nov 30 07:09:23 crc kubenswrapper[4941]: I1130 07:09:23.531311 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dfbf8e6-60f7-47a0-9fee-3d532daf0503" path="/var/lib/kubelet/pods/9dfbf8e6-60f7-47a0-9fee-3d532daf0503/volumes" Nov 30 07:09:27 crc kubenswrapper[4941]: E1130 07:09:27.008574 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:27 crc kubenswrapper[4941]: E1130 07:09:27.008974 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:27 crc kubenswrapper[4941]: E1130 07:09:27.009396 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:27 crc kubenswrapper[4941]: E1130 07:09:27.009594 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 30 07:09:27 crc kubenswrapper[4941]: E1130 07:09:27.009621 4941 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-slfpx" 
podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server" Nov 30 07:09:27 crc kubenswrapper[4941]: E1130 07:09:27.010512 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:27 crc kubenswrapper[4941]: E1130 07:09:27.011658 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 30 07:09:27 crc kubenswrapper[4941]: E1130 07:09:27.011695 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-slfpx" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovs-vswitchd" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.115762 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.145853 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.145936 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-lock\") pod \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.145983 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhs6p\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-kube-api-access-mhs6p\") pod \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.146047 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift\") pod \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.146202 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-cache\") pod \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\" (UID: \"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca\") " Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.146538 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-lock" (OuterVolumeSpecName: "lock") pod "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" (UID: "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca"). InnerVolumeSpecName "lock". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.147458 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-cache" (OuterVolumeSpecName: "cache") pod "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" (UID: "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.170149 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" (UID: "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.174134 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "swift") pod "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" (UID: "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.176484 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-kube-api-access-mhs6p" (OuterVolumeSpecName: "kube-api-access-mhs6p") pod "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" (UID: "e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca"). InnerVolumeSpecName "kube-api-access-mhs6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.248062 4941 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-cache\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.248118 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.248128 4941 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-lock\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.248139 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhs6p\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-kube-api-access-mhs6p\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.248148 4941 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.270944 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.349433 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:29 
crc kubenswrapper[4941]: I1130 07:09:29.446814 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerID="70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb" exitCode=137 Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.446959 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.446957 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb"} Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.447120 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca","Type":"ContainerDied","Data":"528e1c3a07f331f4482cc697dc5f8de6c640ce9f0544cb8518a4ad1d933d98e4"} Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.447159 4941 scope.go:117] "RemoveContainer" containerID="70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.452139 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-slfpx_af96ea99-9953-4e58-8ecc-0999730fcaf9/ovs-vswitchd/0.log" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.454789 4941 generic.go:334] "Generic (PLEG): container finished" podID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" exitCode=137 Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.454837 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-slfpx" event={"ID":"af96ea99-9953-4e58-8ecc-0999730fcaf9","Type":"ContainerDied","Data":"ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f"} Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.473097 4941 scope.go:117] "RemoveContainer" containerID="6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.488950 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.497880 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.498350 4941 scope.go:117] "RemoveContainer" containerID="e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.519803 4941 scope.go:117] "RemoveContainer" containerID="7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.540868 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" path="/var/lib/kubelet/pods/e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca/volumes" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.548712 4941 scope.go:117] "RemoveContainer" containerID="5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.582038 4941 scope.go:117] "RemoveContainer" containerID="2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.616654 4941 scope.go:117] "RemoveContainer" 
containerID="23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.640138 4941 scope.go:117] "RemoveContainer" containerID="b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.662079 4941 scope.go:117] "RemoveContainer" containerID="0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.705563 4941 scope.go:117] "RemoveContainer" containerID="214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.723322 4941 scope.go:117] "RemoveContainer" containerID="7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.742099 4941 scope.go:117] "RemoveContainer" containerID="dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.778257 4941 scope.go:117] "RemoveContainer" containerID="30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.796516 4941 scope.go:117] "RemoveContainer" containerID="a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.813186 4941 scope.go:117] "RemoveContainer" containerID="878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.839665 4941 scope.go:117] "RemoveContainer" containerID="70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.840088 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb\": container with ID starting with 70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb not found: ID does not exist" containerID="70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.840112 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb"} err="failed to get container status \"70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb\": rpc error: code = NotFound desc = could not find container \"70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb\": container with ID starting with 70031c02cd6870983e69f0f361386b073cbc6040830f7824333c1161dcd230fb not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.840132 4941 scope.go:117] "RemoveContainer" containerID="6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.840540 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf\": container with ID starting with 6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf not found: ID does not exist" containerID="6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.840561 4941 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf"} err="failed to get container status \"6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf\": rpc error: code = NotFound desc = could not find container \"6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf\": container with ID starting with 6d93377c83f8d10da9c548fab55c33dd2d4d20ddcae1e1d14569c10189ac70cf not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.840576 4941 scope.go:117] "RemoveContainer" containerID="e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.840864 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34\": container with ID starting with e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34 not found: ID does not exist" containerID="e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.840899 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34"} err="failed to get container status \"e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34\": rpc error: code = NotFound desc = could not find container \"e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34\": container with ID starting with e3d6497bc72cfa09a3cc60dc32b9715f64d481c51912823d7366c8a4b421bf34 not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.840925 4941 scope.go:117] "RemoveContainer" containerID="7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.841228 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed\": container with ID starting with 7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed not found: ID does not exist" containerID="7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.841244 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed"} err="failed to get container status \"7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed\": rpc error: code = NotFound desc = could not find container \"7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed\": container with ID starting with 7d0f488a4b9e5fa2e4d358844ea785152e93daeb2afb2008a839305b2903b0ed not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.841256 4941 scope.go:117] "RemoveContainer" containerID="5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.841545 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc\": container with ID starting with 5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc not found: ID does not exist" 
containerID="5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.841563 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc"} err="failed to get container status \"5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc\": rpc error: code = NotFound desc = could not find container \"5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc\": container with ID starting with 5d1800ec5785efa2158588497e688919358987934ddcb518f6aaad36b07a3edc not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.841575 4941 scope.go:117] "RemoveContainer" containerID="2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.841807 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44\": container with ID starting with 2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44 not found: ID does not exist" containerID="2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.841826 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44"} err="failed to get container status \"2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44\": rpc error: code = NotFound desc = could not find container \"2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44\": container with ID starting with 2e0f0227d0f42bf052e1b8ed320543d91b1af39180167acc52b7d5af8e9bda44 not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.841840 4941 scope.go:117] "RemoveContainer" containerID="23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.842160 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48\": container with ID starting with 23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48 not found: ID does not exist" containerID="23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.842179 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48"} err="failed to get container status \"23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48\": rpc error: code = NotFound desc = could not find container \"23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48\": container with ID starting with 23b6b9d5d9a72b064d8285b8f72903f395eda48aa4b6d105d81b36334efa5c48 not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.842191 4941 scope.go:117] "RemoveContainer" containerID="b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.842612 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc\": container with ID starting with b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc not found: ID does not exist" containerID="b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.842653 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc"} err="failed to get container status \"b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc\": rpc error: code = NotFound desc = could not find container \"b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc\": container with ID starting with b5422dcf8cc235fe576a3105964c0413ec7a149f20a4d04dfee4da86f098e7cc not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.842685 4941 scope.go:117] "RemoveContainer" containerID="0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.843004 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5\": container with ID starting with 0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5 not found: ID does not exist" containerID="0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.843030 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5"} err="failed to get container status \"0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5\": rpc error: code = NotFound desc = could not find container \"0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5\": container with ID starting with 0b692109095787b07227eeaca7d58fecc795b5b86987fa564f52a5dd8e40f4d5 not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.843045 4941 scope.go:117] "RemoveContainer" containerID="214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.843342 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2\": container with ID starting with 214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2 not found: ID does not exist" containerID="214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.843371 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2"} err="failed to get container status \"214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2\": rpc error: code = NotFound desc = could not find container \"214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2\": container with ID starting with 214fe96200a19f72bfe9668f0a81ac6ef6ec891df71ef9182dbb65e66050e7a2 not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.843386 4941 scope.go:117] "RemoveContainer" containerID="7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99" Nov 30 07:09:29 crc 
kubenswrapper[4941]: E1130 07:09:29.843600 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99\": container with ID starting with 7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99 not found: ID does not exist" containerID="7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.843627 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99"} err="failed to get container status \"7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99\": rpc error: code = NotFound desc = could not find container \"7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99\": container with ID starting with 7804416f6f93dcd3be9d39cb88c979810dc8c09883a6702e201793d36f7ffa99 not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.843642 4941 scope.go:117] "RemoveContainer" containerID="dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.843843 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897\": container with ID starting with dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897 not found: ID does not exist" containerID="dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.843870 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897"} err="failed to get container status \"dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897\": rpc error: code = NotFound desc = could not find container \"dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897\": container with ID starting with dc215acebe0576f762ce8ba020df7d137160668cdfba69b7217db077b74a0897 not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.843886 4941 scope.go:117] "RemoveContainer" containerID="30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.844136 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d\": container with ID starting with 30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d not found: ID does not exist" containerID="30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.844165 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d"} err="failed to get container status \"30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d\": rpc error: code = NotFound desc = could not find container \"30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d\": container with ID starting with 30511069ccf7b58173c3d960ff4ec8ffe8762c2cc955578dc10efebd0922ea8d not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: 
I1130 07:09:29.844181 4941 scope.go:117] "RemoveContainer" containerID="a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.844446 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893\": container with ID starting with a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893 not found: ID does not exist" containerID="a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.844471 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893"} err="failed to get container status \"a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893\": rpc error: code = NotFound desc = could not find container \"a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893\": container with ID starting with a663d389aff3333954b165910b6f05ace589ee33623fe1cbfcd6d0574894d893 not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.844489 4941 scope.go:117] "RemoveContainer" containerID="878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5" Nov 30 07:09:29 crc kubenswrapper[4941]: E1130 07:09:29.844720 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5\": container with ID starting with 878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5 not found: ID does not exist" containerID="878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.844743 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5"} err="failed to get container status \"878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5\": rpc error: code = NotFound desc = could not find container \"878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5\": container with ID starting with 878c129cd3f9182099b2584be9828ecfe8e125e94fe39940cc0e7291030843a5 not found: ID does not exist" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.882288 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-slfpx_af96ea99-9953-4e58-8ecc-0999730fcaf9/ovs-vswitchd/0.log" Nov 30 07:09:29 crc kubenswrapper[4941]: I1130 07:09:29.883054 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058020 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flxgc\" (UniqueName: \"kubernetes.io/projected/af96ea99-9953-4e58-8ecc-0999730fcaf9-kube-api-access-flxgc\") pod \"af96ea99-9953-4e58-8ecc-0999730fcaf9\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058177 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-lib\") pod \"af96ea99-9953-4e58-8ecc-0999730fcaf9\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058212 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-run\") pod \"af96ea99-9953-4e58-8ecc-0999730fcaf9\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058236 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-lib" (OuterVolumeSpecName: "var-lib") pod "af96ea99-9953-4e58-8ecc-0999730fcaf9" (UID: "af96ea99-9953-4e58-8ecc-0999730fcaf9"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058268 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-run" (OuterVolumeSpecName: "var-run") pod "af96ea99-9953-4e58-8ecc-0999730fcaf9" (UID: "af96ea99-9953-4e58-8ecc-0999730fcaf9"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058362 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af96ea99-9953-4e58-8ecc-0999730fcaf9-scripts\") pod \"af96ea99-9953-4e58-8ecc-0999730fcaf9\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058406 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-log\") pod \"af96ea99-9953-4e58-8ecc-0999730fcaf9\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058447 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-etc-ovs\") pod \"af96ea99-9953-4e58-8ecc-0999730fcaf9\" (UID: \"af96ea99-9953-4e58-8ecc-0999730fcaf9\") " Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058544 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-log" (OuterVolumeSpecName: "var-log") pod "af96ea99-9953-4e58-8ecc-0999730fcaf9" (UID: "af96ea99-9953-4e58-8ecc-0999730fcaf9"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058627 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "af96ea99-9953-4e58-8ecc-0999730fcaf9" (UID: "af96ea99-9953-4e58-8ecc-0999730fcaf9"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058854 4941 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-log\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058888 4941 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-etc-ovs\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058896 4941 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-lib\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.058905 4941 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/af96ea99-9953-4e58-8ecc-0999730fcaf9-var-run\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.059643 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af96ea99-9953-4e58-8ecc-0999730fcaf9-scripts" (OuterVolumeSpecName: "scripts") pod "af96ea99-9953-4e58-8ecc-0999730fcaf9" (UID: "af96ea99-9953-4e58-8ecc-0999730fcaf9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.061654 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af96ea99-9953-4e58-8ecc-0999730fcaf9-kube-api-access-flxgc" (OuterVolumeSpecName: "kube-api-access-flxgc") pod "af96ea99-9953-4e58-8ecc-0999730fcaf9" (UID: "af96ea99-9953-4e58-8ecc-0999730fcaf9"). InnerVolumeSpecName "kube-api-access-flxgc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.159873 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flxgc\" (UniqueName: \"kubernetes.io/projected/af96ea99-9953-4e58-8ecc-0999730fcaf9-kube-api-access-flxgc\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.159899 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af96ea99-9953-4e58-8ecc-0999730fcaf9-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.466434 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-slfpx_af96ea99-9953-4e58-8ecc-0999730fcaf9/ovs-vswitchd/0.log" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.468250 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-slfpx" event={"ID":"af96ea99-9953-4e58-8ecc-0999730fcaf9","Type":"ContainerDied","Data":"8a597546ac104c631a8abd11a04d992ca57a97a8f4c9e35b4d0ebd08223bd0a2"} Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.468350 4941 scope.go:117] "RemoveContainer" containerID="ecfadef48ef80c1d5585906f97bef87b96919fb7e40cdd371da2cdb98a274f8f" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.468720 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-slfpx" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.499164 4941 scope.go:117] "RemoveContainer" containerID="de673e98976c2ee622dab467f431e23c4fd76af4eda189345c8ca5c4b7ff33d9" Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.513643 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-slfpx"] Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.518799 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-slfpx"] Nov 30 07:09:30 crc kubenswrapper[4941]: I1130 07:09:30.542608 4941 scope.go:117] "RemoveContainer" containerID="1523a6a7a83aaaf7a71979e336c4ecae83bc030afc2723bba30b9e6b531966fa" Nov 30 07:09:31 crc kubenswrapper[4941]: I1130 07:09:31.535076 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" path="/var/lib/kubelet/pods/af96ea99-9953-4e58-8ecc-0999730fcaf9/volumes" Nov 30 07:09:31 crc kubenswrapper[4941]: I1130 07:09:31.597590 4941 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod14ce638b-1621-451a-80b6-0e13b6ffb734"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod14ce638b-1621-451a-80b6-0e13b6ffb734] : Timed out while waiting for systemd to remove kubepods-besteffort-pod14ce638b_1621_451a_80b6_0e13b6ffb734.slice" Nov 30 07:09:31 crc kubenswrapper[4941]: E1130 07:09:31.597660 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod14ce638b-1621-451a-80b6-0e13b6ffb734] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod14ce638b-1621-451a-80b6-0e13b6ffb734] : Timed out while waiting for systemd to remove kubepods-besteffort-pod14ce638b_1621_451a_80b6_0e13b6ffb734.slice" pod="openstack/ovsdbserver-sb-0" podUID="14ce638b-1621-451a-80b6-0e13b6ffb734" Nov 30 07:09:31 crc kubenswrapper[4941]: I1130 07:09:31.601029 4941 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" 
cgroupName=["kubepods","besteffort","podea6e32f6-a5d9-4b23-9588-2ea6be572e72"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podea6e32f6-a5d9-4b23-9588-2ea6be572e72] : Timed out while waiting for systemd to remove kubepods-besteffort-podea6e32f6_a5d9_4b23_9588_2ea6be572e72.slice" Nov 30 07:09:31 crc kubenswrapper[4941]: E1130 07:09:31.601089 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort podea6e32f6-a5d9-4b23-9588-2ea6be572e72] : unable to destroy cgroup paths for cgroup [kubepods besteffort podea6e32f6-a5d9-4b23-9588-2ea6be572e72] : Timed out while waiting for systemd to remove kubepods-besteffort-podea6e32f6_a5d9_4b23_9588_2ea6be572e72.slice" pod="openstack/ovsdbserver-nb-0" podUID="ea6e32f6-a5d9-4b23-9588-2ea6be572e72" Nov 30 07:09:31 crc kubenswrapper[4941]: I1130 07:09:31.603253 4941 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod5679f4ed-6882-4f85-93b2-02ccff357b48"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod5679f4ed-6882-4f85-93b2-02ccff357b48] : Timed out while waiting for systemd to remove kubepods-besteffort-pod5679f4ed_6882_4f85_93b2_02ccff357b48.slice" Nov 30 07:09:31 crc kubenswrapper[4941]: E1130 07:09:31.603312 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod5679f4ed-6882-4f85-93b2-02ccff357b48] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod5679f4ed-6882-4f85-93b2-02ccff357b48] : Timed out while waiting for systemd to remove kubepods-besteffort-pod5679f4ed_6882_4f85_93b2_02ccff357b48.slice" pod="openstack/openstackclient" podUID="5679f4ed-6882-4f85-93b2-02ccff357b48" Nov 30 07:09:32 crc kubenswrapper[4941]: I1130 07:09:32.492042 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 30 07:09:32 crc kubenswrapper[4941]: I1130 07:09:32.492150 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 30 07:09:32 crc kubenswrapper[4941]: I1130 07:09:32.492150 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 30 07:09:32 crc kubenswrapper[4941]: I1130 07:09:32.532587 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 30 07:09:32 crc kubenswrapper[4941]: I1130 07:09:32.542754 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 30 07:09:32 crc kubenswrapper[4941]: I1130 07:09:32.550296 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 30 07:09:32 crc kubenswrapper[4941]: I1130 07:09:32.557078 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 30 07:09:33 crc kubenswrapper[4941]: I1130 07:09:33.532188 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14ce638b-1621-451a-80b6-0e13b6ffb734" path="/var/lib/kubelet/pods/14ce638b-1621-451a-80b6-0e13b6ffb734/volumes" Nov 30 07:09:33 crc kubenswrapper[4941]: I1130 07:09:33.534107 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea6e32f6-a5d9-4b23-9588-2ea6be572e72" path="/var/lib/kubelet/pods/ea6e32f6-a5d9-4b23-9588-2ea6be572e72/volumes" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.058421 4941 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a] : Timed out while waiting for systemd to remove kubepods-besteffort-pod74b8cb3f_4d3c_4ae3_a258_1ee9d4ffac9a.slice" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.256664 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glancec0c5-account-delete-fx6st" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.301502 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement6f8e-account-delete-sd7xt" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.360729 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mxqb6"] Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361071 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" containerName="cinder-scheduler" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361105 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" containerName="cinder-scheduler" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361124 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="rsync" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361130 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="rsync" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361141 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" containerName="nova-metadata-metadata" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361207 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" containerName="nova-metadata-metadata" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361218 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-replicator" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361224 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-replicator" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361236 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-server" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361242 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-server" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361250 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" containerName="probe" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361256 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" containerName="probe" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361265 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb74a593-764a-416b-897b-539bafb29c70" containerName="barbican-worker-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361270 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb74a593-764a-416b-897b-539bafb29c70" containerName="barbican-worker-log" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361282 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d229a913-5522-4197-be77-fad9a0187f74" containerName="mariadb-account-delete" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361287 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d229a913-5522-4197-be77-fad9a0187f74" containerName="mariadb-account-delete" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361295 4941 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="94cbff61-3614-4efd-b4ba-36bef65f2ae7" containerName="openstack-network-exporter" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361300 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="94cbff61-3614-4efd-b4ba-36bef65f2ae7" containerName="openstack-network-exporter" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361309 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dfbf8e6-60f7-47a0-9fee-3d532daf0503" containerName="neutron-api" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361315 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dfbf8e6-60f7-47a0-9fee-3d532daf0503" containerName="neutron-api" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361342 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dfbf8e6-60f7-47a0-9fee-3d532daf0503" containerName="neutron-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361350 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dfbf8e6-60f7-47a0-9fee-3d532daf0503" containerName="neutron-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361365 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25deaa20-8f61-4317-ad4a-11df9ddff2fe" containerName="keystone-api" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361374 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="25deaa20-8f61-4317-ad4a-11df9ddff2fe" containerName="keystone-api" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361386 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7a5ee5-1f0c-4819-a375-891a5e2cea03" containerName="setup-container" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361392 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7a5ee5-1f0c-4819-a375-891a5e2cea03" containerName="setup-container" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361404 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a573f7e0-ee6d-4847-a778-5f6ef41fd17f" containerName="glance-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361410 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a573f7e0-ee6d-4847-a778-5f6ef41fd17f" containerName="glance-log" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361421 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-auditor" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361428 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-auditor" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361441 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="ceilometer-notification-agent" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361450 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="ceilometer-notification-agent" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361462 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea6e32f6-a5d9-4b23-9588-2ea6be572e72" containerName="ovsdbserver-nb" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361469 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea6e32f6-a5d9-4b23-9588-2ea6be572e72" containerName="ovsdbserver-nb" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361484 4941 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-updater" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361493 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-updater" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361502 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-expirer" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361510 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-expirer" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361526 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1822cd0b-b52d-49d7-b787-a1091edfc585" containerName="cinder-api" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361535 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1822cd0b-b52d-49d7-b787-a1091edfc585" containerName="cinder-api" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361542 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14ac7adf-c7e3-4512-9c65-6361d005b4b7" containerName="nova-scheduler-scheduler" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361549 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="14ac7adf-c7e3-4512-9c65-6361d005b4b7" containerName="nova-scheduler-scheduler" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361559 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" containerName="nova-api-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361566 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" containerName="nova-api-log" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361577 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79ca3b29-7cdd-4923-a12c-2f350d4b8728" containerName="galera" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361584 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="79ca3b29-7cdd-4923-a12c-2f350d4b8728" containerName="galera" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361594 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14ce638b-1621-451a-80b6-0e13b6ffb734" containerName="openstack-network-exporter" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361601 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="14ce638b-1621-451a-80b6-0e13b6ffb734" containerName="openstack-network-exporter" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361614 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a758cc8-4546-4982-b2a7-b7824ecfc118" containerName="placement-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361622 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a758cc8-4546-4982-b2a7-b7824ecfc118" containerName="placement-log" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361633 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c05c5cf3-bcb4-4307-a601-fbecde4f026b" containerName="memcached" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361641 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c05c5cf3-bcb4-4307-a601-fbecde4f026b" containerName="memcached" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361650 4941 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14ce638b-1621-451a-80b6-0e13b6ffb734" containerName="ovsdbserver-sb" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361657 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="14ce638b-1621-451a-80b6-0e13b6ffb734" containerName="ovsdbserver-sb" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361671 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb74a593-764a-416b-897b-539bafb29c70" containerName="barbican-worker" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361677 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb74a593-764a-416b-897b-539bafb29c70" containerName="barbican-worker" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361687 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a376fae9-3d2f-4247-b917-0d63e6f4a9da" containerName="glance-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361696 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a376fae9-3d2f-4247-b917-0d63e6f4a9da" containerName="glance-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361705 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="sg-core" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361713 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="sg-core" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361726 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-updater" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361733 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-updater" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361741 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4c68094-a8b8-4327-9ae1-335226d3b938" containerName="mariadb-account-delete" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361748 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4c68094-a8b8-4327-9ae1-335226d3b938" containerName="mariadb-account-delete" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361759 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="322fb449-5599-45af-97e2-158692366d9b" containerName="ovn-northd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361767 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="322fb449-5599-45af-97e2-158692366d9b" containerName="ovn-northd" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361778 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a758cc8-4546-4982-b2a7-b7824ecfc118" containerName="placement-api" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361786 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a758cc8-4546-4982-b2a7-b7824ecfc118" containerName="placement-api" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361799 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="126a91a7-8a81-40ef-87db-383ed37a26f4" containerName="barbican-keystone-listener" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361806 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="126a91a7-8a81-40ef-87db-383ed37a26f4" containerName="barbican-keystone-listener" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 
07:09:34.361821 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-server" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361830 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-server" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361844 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-replicator" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361852 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-replicator" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361865 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="proxy-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361872 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="proxy-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361885 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea6e32f6-a5d9-4b23-9588-2ea6be572e72" containerName="openstack-network-exporter" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361892 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea6e32f6-a5d9-4b23-9588-2ea6be572e72" containerName="openstack-network-exporter" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361906 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e01076ff-d267-4931-8788-47eee9ebfd76" containerName="ovn-controller" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361913 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e01076ff-d267-4931-8788-47eee9ebfd76" containerName="ovn-controller" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361922 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bb6c559-5c94-43b0-b6f0-3992652c720f" containerName="mariadb-account-delete" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361930 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bb6c559-5c94-43b0-b6f0-3992652c720f" containerName="mariadb-account-delete" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361944 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce" containerName="nova-cell1-conductor-conductor" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361952 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce" containerName="nova-cell1-conductor-conductor" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361964 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa90de41-9166-475c-925a-3d79b02a694d" containerName="mysql-bootstrap" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361972 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa90de41-9166-475c-925a-3d79b02a694d" containerName="mysql-bootstrap" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.361986 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" containerName="proxy-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.361994 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" 
containerName="proxy-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362002 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-auditor" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362009 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-auditor" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362020 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" containerName="nova-api-api" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362027 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" containerName="nova-api-api" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362035 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f0e9278-1a7f-400f-8a07-31f88c84814b" containerName="mariadb-account-delete" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362043 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f0e9278-1a7f-400f-8a07-31f88c84814b" containerName="mariadb-account-delete" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362054 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362061 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362076 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="322fb449-5599-45af-97e2-158692366d9b" containerName="openstack-network-exporter" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362083 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="322fb449-5599-45af-97e2-158692366d9b" containerName="openstack-network-exporter" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362094 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45978317-0f07-44da-8b74-fbaaec0e6105" containerName="rabbitmq" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362104 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="45978317-0f07-44da-8b74-fbaaec0e6105" containerName="rabbitmq" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362114 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="674f83a3-0419-43d7-a679-fed1bf09b047" containerName="init" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362121 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="674f83a3-0419-43d7-a679-fed1bf09b047" containerName="init" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362156 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server-init" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362163 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server-init" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362177 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" containerName="barbican-api-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362185 4941 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" containerName="barbican-api-log" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362195 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a573f7e0-ee6d-4847-a778-5f6ef41fd17f" containerName="glance-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362202 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a573f7e0-ee6d-4847-a778-5f6ef41fd17f" containerName="glance-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362212 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" containerName="proxy-server" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362220 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" containerName="proxy-server" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362281 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88971618-54e2-4670-be08-a6ae63ed99df" containerName="nova-cell0-conductor-conductor" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362290 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="88971618-54e2-4670-be08-a6ae63ed99df" containerName="nova-cell0-conductor-conductor" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362302 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" containerName="barbican-api" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362309 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" containerName="barbican-api" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362339 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="swift-recon-cron" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362349 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="swift-recon-cron" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362364 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79ca3b29-7cdd-4923-a12c-2f350d4b8728" containerName="mysql-bootstrap" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362372 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="79ca3b29-7cdd-4923-a12c-2f350d4b8728" containerName="mysql-bootstrap" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362385 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa90de41-9166-475c-925a-3d79b02a694d" containerName="galera" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362393 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa90de41-9166-475c-925a-3d79b02a694d" containerName="galera" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362410 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovs-vswitchd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362418 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovs-vswitchd" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362426 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f311bacd-2cef-44fe-95c4-38a7462cd4a6" containerName="nova-cell1-novncproxy-novncproxy" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362434 4941 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="f311bacd-2cef-44fe-95c4-38a7462cd4a6" containerName="nova-cell1-novncproxy-novncproxy" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362444 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a376fae9-3d2f-4247-b917-0d63e6f4a9da" containerName="glance-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362451 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a376fae9-3d2f-4247-b917-0d63e6f4a9da" containerName="glance-log" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362462 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a65953b8-4285-412b-9670-7747951a62ae" containerName="kube-state-metrics" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362470 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a65953b8-4285-412b-9670-7747951a62ae" containerName="kube-state-metrics" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362479 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-replicator" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362486 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-replicator" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362498 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1822cd0b-b52d-49d7-b787-a1091edfc585" containerName="cinder-api-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362506 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1822cd0b-b52d-49d7-b787-a1091edfc585" containerName="cinder-api-log" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362518 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7a5ee5-1f0c-4819-a375-891a5e2cea03" containerName="rabbitmq" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362524 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7a5ee5-1f0c-4819-a375-891a5e2cea03" containerName="rabbitmq" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362536 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="674f83a3-0419-43d7-a679-fed1bf09b047" containerName="dnsmasq-dns" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362543 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="674f83a3-0419-43d7-a679-fed1bf09b047" containerName="dnsmasq-dns" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362551 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="ceilometer-central-agent" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362560 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="ceilometer-central-agent" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362571 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-auditor" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362580 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-auditor" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362594 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" containerName="nova-metadata-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 
07:09:34.362603 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" containerName="nova-metadata-log" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362612 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-server" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362620 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-server" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362629 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-reaper" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362637 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-reaper" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362648 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="126a91a7-8a81-40ef-87db-383ed37a26f4" containerName="barbican-keystone-listener-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362655 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="126a91a7-8a81-40ef-87db-383ed37a26f4" containerName="barbican-keystone-listener-log" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.362666 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45978317-0f07-44da-8b74-fbaaec0e6105" containerName="setup-container" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362674 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="45978317-0f07-44da-8b74-fbaaec0e6105" containerName="setup-container" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362956 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="126a91a7-8a81-40ef-87db-383ed37a26f4" containerName="barbican-keystone-listener-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362977 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovsdb-server" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.362991 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="14ac7adf-c7e3-4512-9c65-6361d005b4b7" containerName="nova-scheduler-scheduler" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363004 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="45978317-0f07-44da-8b74-fbaaec0e6105" containerName="rabbitmq" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363012 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-replicator" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363044 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-auditor" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363053 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a758cc8-4546-4982-b2a7-b7824ecfc118" containerName="placement-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363061 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-expirer" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363071 4941 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="a376fae9-3d2f-4247-b917-0d63e6f4a9da" containerName="glance-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363081 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea6e32f6-a5d9-4b23-9588-2ea6be572e72" containerName="openstack-network-exporter" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363093 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="ceilometer-central-agent" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363129 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" containerName="barbican-api-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363141 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e7a5ee5-1f0c-4819-a375-891a5e2cea03" containerName="rabbitmq" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363150 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-reaper" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363164 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-server" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363172 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="14ce638b-1621-451a-80b6-0e13b6ffb734" containerName="ovsdbserver-sb" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363208 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="af96ea99-9953-4e58-8ecc-0999730fcaf9" containerName="ovs-vswitchd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363220 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" containerName="nova-api-api" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363232 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="sg-core" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363241 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="88971618-54e2-4670-be08-a6ae63ed99df" containerName="nova-cell0-conductor-conductor" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363250 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-server" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363281 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1822cd0b-b52d-49d7-b787-a1091edfc585" containerName="cinder-api" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363293 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-auditor" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363303 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d229a913-5522-4197-be77-fad9a0187f74" containerName="mariadb-account-delete" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363315 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a573f7e0-ee6d-4847-a778-5f6ef41fd17f" containerName="glance-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363355 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4571a7ec-45e1-4c4b-a96a-b9841b3d89bc" containerName="nova-api-log" Nov 30 07:09:34 crc 
kubenswrapper[4941]: I1130 07:09:34.363365 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="14ce638b-1621-451a-80b6-0e13b6ffb734" containerName="openstack-network-exporter" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363373 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" containerName="proxy-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363381 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" containerName="cinder-scheduler" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363393 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-updater" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363401 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="rsync" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363408 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="proxy-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363441 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-replicator" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363452 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa90de41-9166-475c-925a-3d79b02a694d" containerName="galera" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363461 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1822cd0b-b52d-49d7-b787-a1091edfc585" containerName="cinder-api-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363470 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a376fae9-3d2f-4247-b917-0d63e6f4a9da" containerName="glance-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363480 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e3c777d-2ebb-447f-a8a4-7fb17e59d3ce" containerName="nova-cell1-conductor-conductor" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363487 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dfbf8e6-60f7-47a0-9fee-3d532daf0503" containerName="neutron-api" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363520 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="22508ba9-7cfd-462a-9b94-3ee1d8c0a15b" containerName="ceilometer-notification-agent" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363530 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb74a593-764a-416b-897b-539bafb29c70" containerName="barbican-worker" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363544 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="74b8cb3f-4d3c-4ae3-a258-1ee9d4ffac9a" containerName="probe" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363556 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="25deaa20-8f61-4317-ad4a-11df9ddff2fe" containerName="keystone-api" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363565 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a573f7e0-ee6d-4847-a778-5f6ef41fd17f" containerName="glance-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363595 4941 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ea6e32f6-a5d9-4b23-9588-2ea6be572e72" containerName="ovsdbserver-nb" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363604 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bb6c559-5c94-43b0-b6f0-3992652c720f" containerName="mariadb-account-delete" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363615 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="c05c5cf3-bcb4-4307-a601-fbecde4f026b" containerName="memcached" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363622 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf0e4aae-888b-4df8-a6e2-19a5f04b9656" containerName="barbican-api" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363631 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="79ca3b29-7cdd-4923-a12c-2f350d4b8728" containerName="galera" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363639 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="674f83a3-0419-43d7-a679-fed1bf09b047" containerName="dnsmasq-dns" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363650 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="object-updater" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363686 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f311bacd-2cef-44fe-95c4-38a7462cd4a6" containerName="nova-cell1-novncproxy-novncproxy" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363696 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a758cc8-4546-4982-b2a7-b7824ecfc118" containerName="placement-api" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363702 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" containerName="nova-metadata-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363711 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="94cbff61-3614-4efd-b4ba-36bef65f2ae7" containerName="openstack-network-exporter" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363717 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-server" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363725 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="container-replicator" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363753 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f0e9278-1a7f-400f-8a07-31f88c84814b" containerName="mariadb-account-delete" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363763 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e01076ff-d267-4931-8788-47eee9ebfd76" containerName="ovn-controller" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363774 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="08d286da-d376-4b8a-8a7f-c1d22b5a7c3b" containerName="proxy-server" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363781 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dfbf8e6-60f7-47a0-9fee-3d532daf0503" containerName="neutron-httpd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363787 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="322fb449-5599-45af-97e2-158692366d9b" 
containerName="openstack-network-exporter" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363795 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="account-auditor" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363801 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3fdcd-0e0e-4133-9c6c-beb2ca3650ca" containerName="swift-recon-cron" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363810 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb74a593-764a-416b-897b-539bafb29c70" containerName="barbican-worker-log" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363820 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a65953b8-4285-412b-9670-7747951a62ae" containerName="kube-state-metrics" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363826 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4c68094-a8b8-4327-9ae1-335226d3b938" containerName="mariadb-account-delete" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363836 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="322fb449-5599-45af-97e2-158692366d9b" containerName="ovn-northd" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363842 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="126a91a7-8a81-40ef-87db-383ed37a26f4" containerName="barbican-keystone-listener" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.363852 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3315d9fd-71da-4f22-98d8-7142da896aab" containerName="nova-metadata-metadata" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.365127 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.367529 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mxqb6"] Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.428542 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f0e9278-1a7f-400f-8a07-31f88c84814b-operator-scripts\") pod \"4f0e9278-1a7f-400f-8a07-31f88c84814b\" (UID: \"4f0e9278-1a7f-400f-8a07-31f88c84814b\") " Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.428971 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5q4b\" (UniqueName: \"kubernetes.io/projected/4f0e9278-1a7f-400f-8a07-31f88c84814b-kube-api-access-c5q4b\") pod \"4f0e9278-1a7f-400f-8a07-31f88c84814b\" (UID: \"4f0e9278-1a7f-400f-8a07-31f88c84814b\") " Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.429045 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxfdd\" (UniqueName: \"kubernetes.io/projected/d229a913-5522-4197-be77-fad9a0187f74-kube-api-access-gxfdd\") pod \"d229a913-5522-4197-be77-fad9a0187f74\" (UID: \"d229a913-5522-4197-be77-fad9a0187f74\") " Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.429164 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d229a913-5522-4197-be77-fad9a0187f74-operator-scripts\") pod \"d229a913-5522-4197-be77-fad9a0187f74\" (UID: \"d229a913-5522-4197-be77-fad9a0187f74\") " Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.429531 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pb6m\" (UniqueName: \"kubernetes.io/projected/8cca6106-e951-471e-9db9-baeb7e871d0f-kube-api-access-6pb6m\") pod \"redhat-operators-mxqb6\" (UID: \"8cca6106-e951-471e-9db9-baeb7e871d0f\") " pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.429553 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d229a913-5522-4197-be77-fad9a0187f74-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d229a913-5522-4197-be77-fad9a0187f74" (UID: "d229a913-5522-4197-be77-fad9a0187f74"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.429584 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cca6106-e951-471e-9db9-baeb7e871d0f-utilities\") pod \"redhat-operators-mxqb6\" (UID: \"8cca6106-e951-471e-9db9-baeb7e871d0f\") " pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.429597 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f0e9278-1a7f-400f-8a07-31f88c84814b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4f0e9278-1a7f-400f-8a07-31f88c84814b" (UID: "4f0e9278-1a7f-400f-8a07-31f88c84814b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.429705 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cca6106-e951-471e-9db9-baeb7e871d0f-catalog-content\") pod \"redhat-operators-mxqb6\" (UID: \"8cca6106-e951-471e-9db9-baeb7e871d0f\") " pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.429807 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d229a913-5522-4197-be77-fad9a0187f74-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.429823 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f0e9278-1a7f-400f-8a07-31f88c84814b-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.434292 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f0e9278-1a7f-400f-8a07-31f88c84814b-kube-api-access-c5q4b" (OuterVolumeSpecName: "kube-api-access-c5q4b") pod "4f0e9278-1a7f-400f-8a07-31f88c84814b" (UID: "4f0e9278-1a7f-400f-8a07-31f88c84814b"). InnerVolumeSpecName "kube-api-access-c5q4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.436227 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d229a913-5522-4197-be77-fad9a0187f74-kube-api-access-gxfdd" (OuterVolumeSpecName: "kube-api-access-gxfdd") pod "d229a913-5522-4197-be77-fad9a0187f74" (UID: "d229a913-5522-4197-be77-fad9a0187f74"). InnerVolumeSpecName "kube-api-access-gxfdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.511759 4941 generic.go:334] "Generic (PLEG): container finished" podID="4f0e9278-1a7f-400f-8a07-31f88c84814b" containerID="2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85" exitCode=137 Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.511800 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement6f8e-account-delete-sd7xt" event={"ID":"4f0e9278-1a7f-400f-8a07-31f88c84814b","Type":"ContainerDied","Data":"2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85"} Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.511823 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement6f8e-account-delete-sd7xt" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.511842 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement6f8e-account-delete-sd7xt" event={"ID":"4f0e9278-1a7f-400f-8a07-31f88c84814b","Type":"ContainerDied","Data":"f8797662183715011d342c437f17c4bda401042a4d898590a761684b56bf35a1"} Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.511861 4941 scope.go:117] "RemoveContainer" containerID="2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.513506 4941 generic.go:334] "Generic (PLEG): container finished" podID="d229a913-5522-4197-be77-fad9a0187f74" containerID="f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d" exitCode=137 Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.513596 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancec0c5-account-delete-fx6st" event={"ID":"d229a913-5522-4197-be77-fad9a0187f74","Type":"ContainerDied","Data":"f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d"} Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.513628 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancec0c5-account-delete-fx6st" event={"ID":"d229a913-5522-4197-be77-fad9a0187f74","Type":"ContainerDied","Data":"b3e08258dc90511575b3ea5d9cdf2a7fdbf3b7c07341c97a47280c9caa8b90c7"} Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.513574 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glancec0c5-account-delete-fx6st" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.530603 4941 scope.go:117] "RemoveContainer" containerID="2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.531155 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85\": container with ID starting with 2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85 not found: ID does not exist" containerID="2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.531207 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85"} err="failed to get container status \"2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85\": rpc error: code = NotFound desc = could not find container \"2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85\": container with ID starting with 2a9de8e711b725d474420e74ceb1e357a68743608d38c5626ce880b8ffd32d85 not found: ID does not exist" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.531236 4941 scope.go:117] "RemoveContainer" containerID="f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.531969 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pb6m\" (UniqueName: \"kubernetes.io/projected/8cca6106-e951-471e-9db9-baeb7e871d0f-kube-api-access-6pb6m\") pod \"redhat-operators-mxqb6\" (UID: \"8cca6106-e951-471e-9db9-baeb7e871d0f\") " pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.533280 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cca6106-e951-471e-9db9-baeb7e871d0f-utilities\") pod \"redhat-operators-mxqb6\" (UID: \"8cca6106-e951-471e-9db9-baeb7e871d0f\") " pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.533372 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cca6106-e951-471e-9db9-baeb7e871d0f-catalog-content\") pod \"redhat-operators-mxqb6\" (UID: \"8cca6106-e951-471e-9db9-baeb7e871d0f\") " pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.533517 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5q4b\" (UniqueName: \"kubernetes.io/projected/4f0e9278-1a7f-400f-8a07-31f88c84814b-kube-api-access-c5q4b\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.533535 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxfdd\" (UniqueName: \"kubernetes.io/projected/d229a913-5522-4197-be77-fad9a0187f74-kube-api-access-gxfdd\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.534770 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cca6106-e951-471e-9db9-baeb7e871d0f-utilities\") pod \"redhat-operators-mxqb6\" (UID: \"8cca6106-e951-471e-9db9-baeb7e871d0f\") " pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.534845 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cca6106-e951-471e-9db9-baeb7e871d0f-catalog-content\") pod \"redhat-operators-mxqb6\" (UID: \"8cca6106-e951-471e-9db9-baeb7e871d0f\") " pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.555523 4941 scope.go:117] "RemoveContainer" containerID="f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d" Nov 30 07:09:34 crc kubenswrapper[4941]: E1130 07:09:34.558558 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d\": container with ID starting with f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d not found: ID does not exist" containerID="f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.558613 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d"} err="failed to get container status \"f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d\": rpc error: code = NotFound desc = could not find container \"f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d\": container with ID starting with f3379cc3984102506be0d089dfe7f36a3706dc1da4c1f884ae5a145adea4832d not found: ID does not exist" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.565195 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bqw2b"] Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.570611 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.574729 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqw2b"] Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.576852 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pb6m\" (UniqueName: \"kubernetes.io/projected/8cca6106-e951-471e-9db9-baeb7e871d0f-kube-api-access-6pb6m\") pod \"redhat-operators-mxqb6\" (UID: \"8cca6106-e951-471e-9db9-baeb7e871d0f\") " pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.594615 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement6f8e-account-delete-sd7xt"] Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.609123 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement6f8e-account-delete-sd7xt"] Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.620452 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glancec0c5-account-delete-fx6st"] Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.628631 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glancec0c5-account-delete-fx6st"] Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.634534 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7958d9f6-0097-464c-9a89-342d89cb8a2f-utilities\") pod \"redhat-marketplace-bqw2b\" (UID: \"7958d9f6-0097-464c-9a89-342d89cb8a2f\") " pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.634674 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7958d9f6-0097-464c-9a89-342d89cb8a2f-catalog-content\") pod \"redhat-marketplace-bqw2b\" (UID: \"7958d9f6-0097-464c-9a89-342d89cb8a2f\") " pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.634808 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdx88\" (UniqueName: \"kubernetes.io/projected/7958d9f6-0097-464c-9a89-342d89cb8a2f-kube-api-access-mdx88\") pod \"redhat-marketplace-bqw2b\" (UID: \"7958d9f6-0097-464c-9a89-342d89cb8a2f\") " pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.686448 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.735751 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7958d9f6-0097-464c-9a89-342d89cb8a2f-catalog-content\") pod \"redhat-marketplace-bqw2b\" (UID: \"7958d9f6-0097-464c-9a89-342d89cb8a2f\") " pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.736031 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdx88\" (UniqueName: \"kubernetes.io/projected/7958d9f6-0097-464c-9a89-342d89cb8a2f-kube-api-access-mdx88\") pod \"redhat-marketplace-bqw2b\" (UID: \"7958d9f6-0097-464c-9a89-342d89cb8a2f\") " pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.736125 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7958d9f6-0097-464c-9a89-342d89cb8a2f-utilities\") pod \"redhat-marketplace-bqw2b\" (UID: \"7958d9f6-0097-464c-9a89-342d89cb8a2f\") " pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.736645 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7958d9f6-0097-464c-9a89-342d89cb8a2f-catalog-content\") pod \"redhat-marketplace-bqw2b\" (UID: \"7958d9f6-0097-464c-9a89-342d89cb8a2f\") " pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.736728 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7958d9f6-0097-464c-9a89-342d89cb8a2f-utilities\") pod \"redhat-marketplace-bqw2b\" (UID: \"7958d9f6-0097-464c-9a89-342d89cb8a2f\") " pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.759688 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdx88\" (UniqueName: \"kubernetes.io/projected/7958d9f6-0097-464c-9a89-342d89cb8a2f-kube-api-access-mdx88\") pod \"redhat-marketplace-bqw2b\" (UID: \"7958d9f6-0097-464c-9a89-342d89cb8a2f\") " pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:34 crc kubenswrapper[4941]: I1130 07:09:34.902828 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.150900 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mxqb6"] Nov 30 07:09:35 crc kubenswrapper[4941]: W1130 07:09:35.247946 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8cca6106_e951_471e_9db9_baeb7e871d0f.slice/crio-c6e00dba56d6e20e69951502844261b536b5665c225e82425a10554ef38aea7f WatchSource:0}: Error finding container c6e00dba56d6e20e69951502844261b536b5665c225e82425a10554ef38aea7f: Status 404 returned error can't find the container with id c6e00dba56d6e20e69951502844261b536b5665c225e82425a10554ef38aea7f Nov 30 07:09:35 crc kubenswrapper[4941]: E1130 07:09:35.257186 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c62ddf4_ab03_4aa9_968b_ed0a8898d367.slice/crio-a8397a6565b65f4dcbf9cc9eeeb676b2bc40c04a5d9bbc034d755ede612546af.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5342339_2b4f_4cf7_b262_8e9ff9d41bb7.slice/crio-conmon-f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c62ddf4_ab03_4aa9_968b_ed0a8898d367.slice/crio-conmon-a8397a6565b65f4dcbf9cc9eeeb676b2bc40c04a5d9bbc034d755ede612546af.scope\": RecentStats: unable to find data in memory cache]" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.397435 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutrona2cc-account-delete-262pl" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.445286 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqw2b"] Nov 30 07:09:35 crc kubenswrapper[4941]: W1130 07:09:35.503081 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7958d9f6_0097_464c_9a89_342d89cb8a2f.slice/crio-2fc16e93aaad0788fe22fda684355ef75de7448436b3c12a3334753969e860b2 WatchSource:0}: Error finding container 2fc16e93aaad0788fe22fda684355ef75de7448436b3c12a3334753969e860b2: Status 404 returned error can't find the container with id 2fc16e93aaad0788fe22fda684355ef75de7448436b3c12a3334753969e860b2 Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.514839 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell00ef1-account-delete-jbdnp" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.534051 4941 generic.go:334] "Generic (PLEG): container finished" podID="e5342339-2b4f-4cf7-b262-8e9ff9d41bb7" containerID="f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082" exitCode=137 Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.534120 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell00ef1-account-delete-jbdnp" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.535647 4941 generic.go:334] "Generic (PLEG): container finished" podID="4c62ddf4-ab03-4aa9-968b-ed0a8898d367" containerID="a8397a6565b65f4dcbf9cc9eeeb676b2bc40c04a5d9bbc034d755ede612546af" exitCode=137 Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.535776 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi2ee8-account-delete-jr5gz" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.536164 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f0e9278-1a7f-400f-8a07-31f88c84814b" path="/var/lib/kubelet/pods/4f0e9278-1a7f-400f-8a07-31f88c84814b/volumes" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.536741 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d229a913-5522-4197-be77-fad9a0187f74" path="/var/lib/kubelet/pods/d229a913-5522-4197-be77-fad9a0187f74/volumes" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.542315 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell00ef1-account-delete-jbdnp" event={"ID":"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7","Type":"ContainerDied","Data":"f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082"} Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.542397 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell00ef1-account-delete-jbdnp" event={"ID":"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7","Type":"ContainerDied","Data":"223c9af50ec252479a9284daf0c400d42370d25dba2a0f8ddfddeb6d1309e07f"} Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.542412 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi2ee8-account-delete-jr5gz" event={"ID":"4c62ddf4-ab03-4aa9-968b-ed0a8898d367","Type":"ContainerDied","Data":"a8397a6565b65f4dcbf9cc9eeeb676b2bc40c04a5d9bbc034d755ede612546af"} Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.542426 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqw2b" event={"ID":"7958d9f6-0097-464c-9a89-342d89cb8a2f","Type":"ContainerStarted","Data":"2fc16e93aaad0788fe22fda684355ef75de7448436b3c12a3334753969e860b2"} Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.542453 4941 scope.go:117] "RemoveContainer" containerID="f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.543351 4941 generic.go:334] "Generic (PLEG): container finished" podID="8cca6106-e951-471e-9db9-baeb7e871d0f" containerID="713e84d7ffcf7a9a78c9edd78efcf41840ad4d711170708cb86f8a2dd6b3b9a0" exitCode=0 Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.543415 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mxqb6" event={"ID":"8cca6106-e951-471e-9db9-baeb7e871d0f","Type":"ContainerDied","Data":"713e84d7ffcf7a9a78c9edd78efcf41840ad4d711170708cb86f8a2dd6b3b9a0"} Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.543445 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mxqb6" event={"ID":"8cca6106-e951-471e-9db9-baeb7e871d0f","Type":"ContainerStarted","Data":"c6e00dba56d6e20e69951502844261b536b5665c225e82425a10554ef38aea7f"} Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.558671 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmnl6\" 
(UniqueName: \"kubernetes.io/projected/bad7c3f7-8cec-4baf-808a-43184771d1da-kube-api-access-fmnl6\") pod \"bad7c3f7-8cec-4baf-808a-43184771d1da\" (UID: \"bad7c3f7-8cec-4baf-808a-43184771d1da\") " Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.558940 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bad7c3f7-8cec-4baf-808a-43184771d1da-operator-scripts\") pod \"bad7c3f7-8cec-4baf-808a-43184771d1da\" (UID: \"bad7c3f7-8cec-4baf-808a-43184771d1da\") " Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.561803 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bad7c3f7-8cec-4baf-808a-43184771d1da-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bad7c3f7-8cec-4baf-808a-43184771d1da" (UID: "bad7c3f7-8cec-4baf-808a-43184771d1da"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.570835 4941 generic.go:334] "Generic (PLEG): container finished" podID="bad7c3f7-8cec-4baf-808a-43184771d1da" containerID="cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e" exitCode=137 Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.570896 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutrona2cc-account-delete-262pl" event={"ID":"bad7c3f7-8cec-4baf-808a-43184771d1da","Type":"ContainerDied","Data":"cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e"} Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.570922 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutrona2cc-account-delete-262pl" event={"ID":"bad7c3f7-8cec-4baf-808a-43184771d1da","Type":"ContainerDied","Data":"2a3f44b675ce7e85ac120a43b22ce7da214a53cfc5f79b6a27bd94f7110107e6"} Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.570973 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutrona2cc-account-delete-262pl" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.571115 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bad7c3f7-8cec-4baf-808a-43184771d1da-kube-api-access-fmnl6" (OuterVolumeSpecName: "kube-api-access-fmnl6") pod "bad7c3f7-8cec-4baf-808a-43184771d1da" (UID: "bad7c3f7-8cec-4baf-808a-43184771d1da"). InnerVolumeSpecName "kube-api-access-fmnl6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.606275 4941 scope.go:117] "RemoveContainer" containerID="f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082" Nov 30 07:09:35 crc kubenswrapper[4941]: E1130 07:09:35.610697 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082\": container with ID starting with f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082 not found: ID does not exist" containerID="f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.610741 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082"} err="failed to get container status \"f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082\": rpc error: code = NotFound desc = could not find container \"f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082\": container with ID starting with f0dc64e418242b23f4f704f0196785fa3cb3fd619256f15206814d82b6f1e082 not found: ID does not exist" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.610769 4941 scope.go:117] "RemoveContainer" containerID="a8397a6565b65f4dcbf9cc9eeeb676b2bc40c04a5d9bbc034d755ede612546af" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.658662 4941 scope.go:117] "RemoveContainer" containerID="cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.660724 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6br9\" (UniqueName: \"kubernetes.io/projected/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7-kube-api-access-f6br9\") pod \"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7\" (UID: \"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7\") " Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.660780 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts\") pod \"4c62ddf4-ab03-4aa9-968b-ed0a8898d367\" (UID: \"4c62ddf4-ab03-4aa9-968b-ed0a8898d367\") " Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.660856 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsb9w\" (UniqueName: \"kubernetes.io/projected/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-kube-api-access-wsb9w\") pod \"4c62ddf4-ab03-4aa9-968b-ed0a8898d367\" (UID: \"4c62ddf4-ab03-4aa9-968b-ed0a8898d367\") " Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.660888 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7-operator-scripts\") pod \"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7\" (UID: \"e5342339-2b4f-4cf7-b262-8e9ff9d41bb7\") " Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.661271 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmnl6\" (UniqueName: \"kubernetes.io/projected/bad7c3f7-8cec-4baf-808a-43184771d1da-kube-api-access-fmnl6\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.661283 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/bad7c3f7-8cec-4baf-808a-43184771d1da-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.661848 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4c62ddf4-ab03-4aa9-968b-ed0a8898d367" (UID: "4c62ddf4-ab03-4aa9-968b-ed0a8898d367"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.662541 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e5342339-2b4f-4cf7-b262-8e9ff9d41bb7" (UID: "e5342339-2b4f-4cf7-b262-8e9ff9d41bb7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.664059 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7-kube-api-access-f6br9" (OuterVolumeSpecName: "kube-api-access-f6br9") pod "e5342339-2b4f-4cf7-b262-8e9ff9d41bb7" (UID: "e5342339-2b4f-4cf7-b262-8e9ff9d41bb7"). InnerVolumeSpecName "kube-api-access-f6br9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.664306 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-kube-api-access-wsb9w" (OuterVolumeSpecName: "kube-api-access-wsb9w") pod "4c62ddf4-ab03-4aa9-968b-ed0a8898d367" (UID: "4c62ddf4-ab03-4aa9-968b-ed0a8898d367"). InnerVolumeSpecName "kube-api-access-wsb9w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.718409 4941 scope.go:117] "RemoveContainer" containerID="cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e" Nov 30 07:09:35 crc kubenswrapper[4941]: E1130 07:09:35.719260 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e\": container with ID starting with cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e not found: ID does not exist" containerID="cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.719300 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e"} err="failed to get container status \"cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e\": rpc error: code = NotFound desc = could not find container \"cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e\": container with ID starting with cb3c0f4cdb78522b79275380c03e51349df3a8eafa123393fbdeee31c2ba158e not found: ID does not exist" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.762766 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.762791 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6br9\" (UniqueName: \"kubernetes.io/projected/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7-kube-api-access-f6br9\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.762802 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.762812 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsb9w\" (UniqueName: \"kubernetes.io/projected/4c62ddf4-ab03-4aa9-968b-ed0a8898d367-kube-api-access-wsb9w\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.867156 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell00ef1-account-delete-jbdnp"] Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.872476 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell00ef1-account-delete-jbdnp"] Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.900938 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutrona2cc-account-delete-262pl"] Nov 30 07:09:35 crc kubenswrapper[4941]: I1130 07:09:35.915810 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutrona2cc-account-delete-262pl"] Nov 30 07:09:36 crc kubenswrapper[4941]: I1130 07:09:36.598762 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi2ee8-account-delete-jr5gz" Nov 30 07:09:36 crc kubenswrapper[4941]: I1130 07:09:36.598815 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi2ee8-account-delete-jr5gz" event={"ID":"4c62ddf4-ab03-4aa9-968b-ed0a8898d367","Type":"ContainerDied","Data":"8867cf0276a5ab29363c349c6244c2469dee9f6f9d24e1feff654f37861bb6a7"} Nov 30 07:09:36 crc kubenswrapper[4941]: I1130 07:09:36.602661 4941 generic.go:334] "Generic (PLEG): container finished" podID="7958d9f6-0097-464c-9a89-342d89cb8a2f" containerID="22fbfc263584fba263de8b2f18a1ca6be0085ea043dbc49b276d8b27758f8086" exitCode=0 Nov 30 07:09:36 crc kubenswrapper[4941]: I1130 07:09:36.602881 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqw2b" event={"ID":"7958d9f6-0097-464c-9a89-342d89cb8a2f","Type":"ContainerDied","Data":"22fbfc263584fba263de8b2f18a1ca6be0085ea043dbc49b276d8b27758f8086"} Nov 30 07:09:36 crc kubenswrapper[4941]: I1130 07:09:36.678032 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi2ee8-account-delete-jr5gz"] Nov 30 07:09:36 crc kubenswrapper[4941]: I1130 07:09:36.691431 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapi2ee8-account-delete-jr5gz"] Nov 30 07:09:37 crc kubenswrapper[4941]: I1130 07:09:37.531763 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c62ddf4-ab03-4aa9-968b-ed0a8898d367" path="/var/lib/kubelet/pods/4c62ddf4-ab03-4aa9-968b-ed0a8898d367/volumes" Nov 30 07:09:37 crc kubenswrapper[4941]: I1130 07:09:37.532252 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bad7c3f7-8cec-4baf-808a-43184771d1da" path="/var/lib/kubelet/pods/bad7c3f7-8cec-4baf-808a-43184771d1da/volumes" Nov 30 07:09:37 crc kubenswrapper[4941]: I1130 07:09:37.532756 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5342339-2b4f-4cf7-b262-8e9ff9d41bb7" path="/var/lib/kubelet/pods/e5342339-2b4f-4cf7-b262-8e9ff9d41bb7/volumes" Nov 30 07:09:37 crc kubenswrapper[4941]: I1130 07:09:37.618739 4941 generic.go:334] "Generic (PLEG): container finished" podID="7958d9f6-0097-464c-9a89-342d89cb8a2f" containerID="cb3a5a10458c9934b377d30f5904dad43dae76db92cb207c0b340db61a147c79" exitCode=0 Nov 30 07:09:37 crc kubenswrapper[4941]: I1130 07:09:37.618805 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqw2b" event={"ID":"7958d9f6-0097-464c-9a89-342d89cb8a2f","Type":"ContainerDied","Data":"cb3a5a10458c9934b377d30f5904dad43dae76db92cb207c0b340db61a147c79"} Nov 30 07:09:37 crc kubenswrapper[4941]: I1130 07:09:37.621926 4941 generic.go:334] "Generic (PLEG): container finished" podID="8cca6106-e951-471e-9db9-baeb7e871d0f" containerID="07b1662ce92cd1f28dc0697e39de3c43abff4a91253d2bfcb96541b5cfeb216b" exitCode=0 Nov 30 07:09:37 crc kubenswrapper[4941]: I1130 07:09:37.621954 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mxqb6" event={"ID":"8cca6106-e951-471e-9db9-baeb7e871d0f","Type":"ContainerDied","Data":"07b1662ce92cd1f28dc0697e39de3c43abff4a91253d2bfcb96541b5cfeb216b"} Nov 30 07:09:38 crc kubenswrapper[4941]: I1130 07:09:38.636275 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqw2b" event={"ID":"7958d9f6-0097-464c-9a89-342d89cb8a2f","Type":"ContainerStarted","Data":"62a2ab98db72767d4237583f353f80e8a901f08a2992282e9dcabfc8fa66f984"} Nov 30 07:09:38 crc 
kubenswrapper[4941]: I1130 07:09:38.639261 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mxqb6" event={"ID":"8cca6106-e951-471e-9db9-baeb7e871d0f","Type":"ContainerStarted","Data":"a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb"} Nov 30 07:09:38 crc kubenswrapper[4941]: I1130 07:09:38.661911 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bqw2b" podStartSLOduration=3.028067796 podStartE2EDuration="4.661883226s" podCreationTimestamp="2025-11-30 07:09:34 +0000 UTC" firstStartedPulling="2025-11-30 07:09:36.608229057 +0000 UTC m=+1397.376400666" lastFinishedPulling="2025-11-30 07:09:38.242044497 +0000 UTC m=+1399.010216096" observedRunningTime="2025-11-30 07:09:38.658532344 +0000 UTC m=+1399.426703963" watchObservedRunningTime="2025-11-30 07:09:38.661883226 +0000 UTC m=+1399.430054845" Nov 30 07:09:38 crc kubenswrapper[4941]: I1130 07:09:38.697220 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mxqb6" podStartSLOduration=2.223722891 podStartE2EDuration="4.697195208s" podCreationTimestamp="2025-11-30 07:09:34 +0000 UTC" firstStartedPulling="2025-11-30 07:09:35.563830509 +0000 UTC m=+1396.332002118" lastFinishedPulling="2025-11-30 07:09:38.037302806 +0000 UTC m=+1398.805474435" observedRunningTime="2025-11-30 07:09:38.690562815 +0000 UTC m=+1399.458734424" watchObservedRunningTime="2025-11-30 07:09:38.697195208 +0000 UTC m=+1399.465366817" Nov 30 07:09:44 crc kubenswrapper[4941]: I1130 07:09:44.691643 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:44 crc kubenswrapper[4941]: I1130 07:09:44.692137 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:44 crc kubenswrapper[4941]: I1130 07:09:44.753882 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:44 crc kubenswrapper[4941]: I1130 07:09:44.904078 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:44 crc kubenswrapper[4941]: I1130 07:09:44.904167 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:44 crc kubenswrapper[4941]: I1130 07:09:44.968291 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:45 crc kubenswrapper[4941]: I1130 07:09:45.747299 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:45 crc kubenswrapper[4941]: I1130 07:09:45.762695 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:46 crc kubenswrapper[4941]: I1130 07:09:46.005466 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqw2b"] Nov 30 07:09:47 crc kubenswrapper[4941]: I1130 07:09:47.723768 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bqw2b" podUID="7958d9f6-0097-464c-9a89-342d89cb8a2f" containerName="registry-server" 
containerID="cri-o://62a2ab98db72767d4237583f353f80e8a901f08a2992282e9dcabfc8fa66f984" gracePeriod=2 Nov 30 07:09:48 crc kubenswrapper[4941]: I1130 07:09:48.205801 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mxqb6"] Nov 30 07:09:48 crc kubenswrapper[4941]: I1130 07:09:48.206182 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mxqb6" podUID="8cca6106-e951-471e-9db9-baeb7e871d0f" containerName="registry-server" containerID="cri-o://a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb" gracePeriod=2 Nov 30 07:09:49 crc kubenswrapper[4941]: I1130 07:09:49.743537 4941 generic.go:334] "Generic (PLEG): container finished" podID="7958d9f6-0097-464c-9a89-342d89cb8a2f" containerID="62a2ab98db72767d4237583f353f80e8a901f08a2992282e9dcabfc8fa66f984" exitCode=0 Nov 30 07:09:49 crc kubenswrapper[4941]: I1130 07:09:49.743597 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqw2b" event={"ID":"7958d9f6-0097-464c-9a89-342d89cb8a2f","Type":"ContainerDied","Data":"62a2ab98db72767d4237583f353f80e8a901f08a2992282e9dcabfc8fa66f984"} Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.466494 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.610462 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pb6m\" (UniqueName: \"kubernetes.io/projected/8cca6106-e951-471e-9db9-baeb7e871d0f-kube-api-access-6pb6m\") pod \"8cca6106-e951-471e-9db9-baeb7e871d0f\" (UID: \"8cca6106-e951-471e-9db9-baeb7e871d0f\") " Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.610606 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cca6106-e951-471e-9db9-baeb7e871d0f-catalog-content\") pod \"8cca6106-e951-471e-9db9-baeb7e871d0f\" (UID: \"8cca6106-e951-471e-9db9-baeb7e871d0f\") " Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.610671 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cca6106-e951-471e-9db9-baeb7e871d0f-utilities\") pod \"8cca6106-e951-471e-9db9-baeb7e871d0f\" (UID: \"8cca6106-e951-471e-9db9-baeb7e871d0f\") " Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.612844 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cca6106-e951-471e-9db9-baeb7e871d0f-utilities" (OuterVolumeSpecName: "utilities") pod "8cca6106-e951-471e-9db9-baeb7e871d0f" (UID: "8cca6106-e951-471e-9db9-baeb7e871d0f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.659707 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cca6106-e951-471e-9db9-baeb7e871d0f-kube-api-access-6pb6m" (OuterVolumeSpecName: "kube-api-access-6pb6m") pod "8cca6106-e951-471e-9db9-baeb7e871d0f" (UID: "8cca6106-e951-471e-9db9-baeb7e871d0f"). InnerVolumeSpecName "kube-api-access-6pb6m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.712162 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pb6m\" (UniqueName: \"kubernetes.io/projected/8cca6106-e951-471e-9db9-baeb7e871d0f-kube-api-access-6pb6m\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.712400 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cca6106-e951-471e-9db9-baeb7e871d0f-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.745238 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cca6106-e951-471e-9db9-baeb7e871d0f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8cca6106-e951-471e-9db9-baeb7e871d0f" (UID: "8cca6106-e951-471e-9db9-baeb7e871d0f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.756999 4941 generic.go:334] "Generic (PLEG): container finished" podID="8cca6106-e951-471e-9db9-baeb7e871d0f" containerID="a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb" exitCode=0 Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.757071 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mxqb6" event={"ID":"8cca6106-e951-471e-9db9-baeb7e871d0f","Type":"ContainerDied","Data":"a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb"} Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.757122 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mxqb6" event={"ID":"8cca6106-e951-471e-9db9-baeb7e871d0f","Type":"ContainerDied","Data":"c6e00dba56d6e20e69951502844261b536b5665c225e82425a10554ef38aea7f"} Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.757144 4941 scope.go:117] "RemoveContainer" containerID="a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.757425 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mxqb6" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.779846 4941 scope.go:117] "RemoveContainer" containerID="07b1662ce92cd1f28dc0697e39de3c43abff4a91253d2bfcb96541b5cfeb216b" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.794961 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mxqb6"] Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.800002 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mxqb6"] Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.814294 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cca6106-e951-471e-9db9-baeb7e871d0f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.824784 4941 scope.go:117] "RemoveContainer" containerID="713e84d7ffcf7a9a78c9edd78efcf41840ad4d711170708cb86f8a2dd6b3b9a0" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.864476 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.876430 4941 scope.go:117] "RemoveContainer" containerID="a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb" Nov 30 07:09:50 crc kubenswrapper[4941]: E1130 07:09:50.876938 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb\": container with ID starting with a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb not found: ID does not exist" containerID="a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.876970 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb"} err="failed to get container status \"a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb\": rpc error: code = NotFound desc = could not find container \"a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb\": container with ID starting with a62b70f360a70a78511916569358c3117a3cadf6b9adcce9e3f1e6b4d8620dfb not found: ID does not exist" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.876997 4941 scope.go:117] "RemoveContainer" containerID="07b1662ce92cd1f28dc0697e39de3c43abff4a91253d2bfcb96541b5cfeb216b" Nov 30 07:09:50 crc kubenswrapper[4941]: E1130 07:09:50.877839 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07b1662ce92cd1f28dc0697e39de3c43abff4a91253d2bfcb96541b5cfeb216b\": container with ID starting with 07b1662ce92cd1f28dc0697e39de3c43abff4a91253d2bfcb96541b5cfeb216b not found: ID does not exist" containerID="07b1662ce92cd1f28dc0697e39de3c43abff4a91253d2bfcb96541b5cfeb216b" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.877903 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07b1662ce92cd1f28dc0697e39de3c43abff4a91253d2bfcb96541b5cfeb216b"} err="failed to get container status \"07b1662ce92cd1f28dc0697e39de3c43abff4a91253d2bfcb96541b5cfeb216b\": rpc error: code = NotFound desc = could not find container \"07b1662ce92cd1f28dc0697e39de3c43abff4a91253d2bfcb96541b5cfeb216b\": container with ID starting with 07b1662ce92cd1f28dc0697e39de3c43abff4a91253d2bfcb96541b5cfeb216b not found: ID does not exist" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.877949 4941 scope.go:117] "RemoveContainer" containerID="713e84d7ffcf7a9a78c9edd78efcf41840ad4d711170708cb86f8a2dd6b3b9a0" Nov 30 07:09:50 crc kubenswrapper[4941]: E1130 07:09:50.878283 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"713e84d7ffcf7a9a78c9edd78efcf41840ad4d711170708cb86f8a2dd6b3b9a0\": container with ID starting with 713e84d7ffcf7a9a78c9edd78efcf41840ad4d711170708cb86f8a2dd6b3b9a0 not found: ID does not exist" containerID="713e84d7ffcf7a9a78c9edd78efcf41840ad4d711170708cb86f8a2dd6b3b9a0" Nov 30 07:09:50 crc kubenswrapper[4941]: I1130 07:09:50.878308 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"713e84d7ffcf7a9a78c9edd78efcf41840ad4d711170708cb86f8a2dd6b3b9a0"} err="failed to get container status \"713e84d7ffcf7a9a78c9edd78efcf41840ad4d711170708cb86f8a2dd6b3b9a0\": rpc error: code = 
NotFound desc = could not find container \"713e84d7ffcf7a9a78c9edd78efcf41840ad4d711170708cb86f8a2dd6b3b9a0\": container with ID starting with 713e84d7ffcf7a9a78c9edd78efcf41840ad4d711170708cb86f8a2dd6b3b9a0 not found: ID does not exist" Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.017194 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7958d9f6-0097-464c-9a89-342d89cb8a2f-utilities\") pod \"7958d9f6-0097-464c-9a89-342d89cb8a2f\" (UID: \"7958d9f6-0097-464c-9a89-342d89cb8a2f\") " Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.017252 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdx88\" (UniqueName: \"kubernetes.io/projected/7958d9f6-0097-464c-9a89-342d89cb8a2f-kube-api-access-mdx88\") pod \"7958d9f6-0097-464c-9a89-342d89cb8a2f\" (UID: \"7958d9f6-0097-464c-9a89-342d89cb8a2f\") " Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.017453 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7958d9f6-0097-464c-9a89-342d89cb8a2f-catalog-content\") pod \"7958d9f6-0097-464c-9a89-342d89cb8a2f\" (UID: \"7958d9f6-0097-464c-9a89-342d89cb8a2f\") " Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.018177 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7958d9f6-0097-464c-9a89-342d89cb8a2f-utilities" (OuterVolumeSpecName: "utilities") pod "7958d9f6-0097-464c-9a89-342d89cb8a2f" (UID: "7958d9f6-0097-464c-9a89-342d89cb8a2f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.021812 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7958d9f6-0097-464c-9a89-342d89cb8a2f-kube-api-access-mdx88" (OuterVolumeSpecName: "kube-api-access-mdx88") pod "7958d9f6-0097-464c-9a89-342d89cb8a2f" (UID: "7958d9f6-0097-464c-9a89-342d89cb8a2f"). InnerVolumeSpecName "kube-api-access-mdx88". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.035787 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7958d9f6-0097-464c-9a89-342d89cb8a2f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7958d9f6-0097-464c-9a89-342d89cb8a2f" (UID: "7958d9f6-0097-464c-9a89-342d89cb8a2f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.119553 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7958d9f6-0097-464c-9a89-342d89cb8a2f-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.119589 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdx88\" (UniqueName: \"kubernetes.io/projected/7958d9f6-0097-464c-9a89-342d89cb8a2f-kube-api-access-mdx88\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.119602 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7958d9f6-0097-464c-9a89-342d89cb8a2f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.530520 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cca6106-e951-471e-9db9-baeb7e871d0f" path="/var/lib/kubelet/pods/8cca6106-e951-471e-9db9-baeb7e871d0f/volumes" Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.770311 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqw2b" event={"ID":"7958d9f6-0097-464c-9a89-342d89cb8a2f","Type":"ContainerDied","Data":"2fc16e93aaad0788fe22fda684355ef75de7448436b3c12a3334753969e860b2"} Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.770375 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqw2b" Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.770411 4941 scope.go:117] "RemoveContainer" containerID="62a2ab98db72767d4237583f353f80e8a901f08a2992282e9dcabfc8fa66f984" Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.798310 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqw2b"] Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.804309 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqw2b"] Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.810940 4941 scope.go:117] "RemoveContainer" containerID="cb3a5a10458c9934b377d30f5904dad43dae76db92cb207c0b340db61a147c79" Nov 30 07:09:51 crc kubenswrapper[4941]: I1130 07:09:51.832539 4941 scope.go:117] "RemoveContainer" containerID="22fbfc263584fba263de8b2f18a1ca6be0085ea043dbc49b276d8b27758f8086" Nov 30 07:09:53 crc kubenswrapper[4941]: I1130 07:09:53.612631 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7958d9f6-0097-464c-9a89-342d89cb8a2f" path="/var/lib/kubelet/pods/7958d9f6-0097-464c-9a89-342d89cb8a2f/volumes" Nov 30 07:10:02 crc kubenswrapper[4941]: I1130 07:10:02.978790 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:10:02 crc kubenswrapper[4941]: I1130 07:10:02.979439 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:10:32 crc kubenswrapper[4941]: 
I1130 07:10:32.978254 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:10:32 crc kubenswrapper[4941]: I1130 07:10:32.978781 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:10:35 crc kubenswrapper[4941]: I1130 07:10:35.956658 4941 scope.go:117] "RemoveContainer" containerID="c8dea3d901536f92143b2d0853186bb74f8eb40c02c82dcf71d84565f2e4dbc7" Nov 30 07:10:35 crc kubenswrapper[4941]: I1130 07:10:35.984999 4941 scope.go:117] "RemoveContainer" containerID="e84f6dd36a19f12cd165d9c7cbfb94fb5ea22319817c0302e9b4c7b1ddb13109" Nov 30 07:10:36 crc kubenswrapper[4941]: I1130 07:10:36.006352 4941 scope.go:117] "RemoveContainer" containerID="8ddf483285cd2430d6cec8c7625c079637a8728f27cdd14f85211a9e2e621640" Nov 30 07:10:36 crc kubenswrapper[4941]: I1130 07:10:36.045057 4941 scope.go:117] "RemoveContainer" containerID="6620a3aa486742d5043cee90f84c857185f75eb6aa26481d3b5919db990ebcf2" Nov 30 07:10:36 crc kubenswrapper[4941]: I1130 07:10:36.073188 4941 scope.go:117] "RemoveContainer" containerID="2c3d7dc00d9636d871b92937c391e1fe53cfe7256b79a79c00f7b0ed2afdc5b4" Nov 30 07:10:36 crc kubenswrapper[4941]: I1130 07:10:36.095530 4941 scope.go:117] "RemoveContainer" containerID="5c6f3a2a19c5ce5cd94327bc758bf13a86b7ae51c23f1c360ae9657455e0503b" Nov 30 07:10:36 crc kubenswrapper[4941]: I1130 07:10:36.130341 4941 scope.go:117] "RemoveContainer" containerID="0699e9d89734106167f49496bec6872e386a90ae7040eadeef3df97ab4bf7530" Nov 30 07:10:36 crc kubenswrapper[4941]: I1130 07:10:36.158570 4941 scope.go:117] "RemoveContainer" containerID="acede44e91df8abe2ec238513573f98b392bd74f31e0181fc783a56bd22a6f4e" Nov 30 07:10:36 crc kubenswrapper[4941]: I1130 07:10:36.182427 4941 scope.go:117] "RemoveContainer" containerID="264d6282f2a198345fd9841d0e85c31aa2cff83866d86f1cfa4155e9d969338d" Nov 30 07:10:36 crc kubenswrapper[4941]: I1130 07:10:36.211100 4941 scope.go:117] "RemoveContainer" containerID="ef1eef5d4d1e7312fddf801ca1f92d70d30764509fec5b54de267fa957a12e8b" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.626770 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cg4fb"] Nov 30 07:10:57 crc kubenswrapper[4941]: E1130 07:10:57.629969 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cca6106-e951-471e-9db9-baeb7e871d0f" containerName="extract-utilities" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.629994 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cca6106-e951-471e-9db9-baeb7e871d0f" containerName="extract-utilities" Nov 30 07:10:57 crc kubenswrapper[4941]: E1130 07:10:57.630013 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7958d9f6-0097-464c-9a89-342d89cb8a2f" containerName="extract-utilities" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630022 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7958d9f6-0097-464c-9a89-342d89cb8a2f" containerName="extract-utilities" Nov 30 07:10:57 crc kubenswrapper[4941]: E1130 07:10:57.630034 4941 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7958d9f6-0097-464c-9a89-342d89cb8a2f" containerName="extract-content" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630041 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7958d9f6-0097-464c-9a89-342d89cb8a2f" containerName="extract-content" Nov 30 07:10:57 crc kubenswrapper[4941]: E1130 07:10:57.630053 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cca6106-e951-471e-9db9-baeb7e871d0f" containerName="registry-server" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630059 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cca6106-e951-471e-9db9-baeb7e871d0f" containerName="registry-server" Nov 30 07:10:57 crc kubenswrapper[4941]: E1130 07:10:57.630075 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bad7c3f7-8cec-4baf-808a-43184771d1da" containerName="mariadb-account-delete" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630083 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="bad7c3f7-8cec-4baf-808a-43184771d1da" containerName="mariadb-account-delete" Nov 30 07:10:57 crc kubenswrapper[4941]: E1130 07:10:57.630097 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5342339-2b4f-4cf7-b262-8e9ff9d41bb7" containerName="mariadb-account-delete" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630103 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5342339-2b4f-4cf7-b262-8e9ff9d41bb7" containerName="mariadb-account-delete" Nov 30 07:10:57 crc kubenswrapper[4941]: E1130 07:10:57.630114 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cca6106-e951-471e-9db9-baeb7e871d0f" containerName="extract-content" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630120 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cca6106-e951-471e-9db9-baeb7e871d0f" containerName="extract-content" Nov 30 07:10:57 crc kubenswrapper[4941]: E1130 07:10:57.630166 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c62ddf4-ab03-4aa9-968b-ed0a8898d367" containerName="mariadb-account-delete" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630172 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c62ddf4-ab03-4aa9-968b-ed0a8898d367" containerName="mariadb-account-delete" Nov 30 07:10:57 crc kubenswrapper[4941]: E1130 07:10:57.630185 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7958d9f6-0097-464c-9a89-342d89cb8a2f" containerName="registry-server" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630191 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7958d9f6-0097-464c-9a89-342d89cb8a2f" containerName="registry-server" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630344 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="bad7c3f7-8cec-4baf-808a-43184771d1da" containerName="mariadb-account-delete" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630359 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c62ddf4-ab03-4aa9-968b-ed0a8898d367" containerName="mariadb-account-delete" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630373 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5342339-2b4f-4cf7-b262-8e9ff9d41bb7" containerName="mariadb-account-delete" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630383 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cca6106-e951-471e-9db9-baeb7e871d0f" 
containerName="registry-server" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.630394 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7958d9f6-0097-464c-9a89-342d89cb8a2f" containerName="registry-server" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.632108 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.645678 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cg4fb"] Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.721218 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b756j\" (UniqueName: \"kubernetes.io/projected/dc391fad-c4e5-4cae-8eff-2d50542e0395-kube-api-access-b756j\") pod \"community-operators-cg4fb\" (UID: \"dc391fad-c4e5-4cae-8eff-2d50542e0395\") " pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.721311 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc391fad-c4e5-4cae-8eff-2d50542e0395-utilities\") pod \"community-operators-cg4fb\" (UID: \"dc391fad-c4e5-4cae-8eff-2d50542e0395\") " pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.721391 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc391fad-c4e5-4cae-8eff-2d50542e0395-catalog-content\") pod \"community-operators-cg4fb\" (UID: \"dc391fad-c4e5-4cae-8eff-2d50542e0395\") " pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.822076 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc391fad-c4e5-4cae-8eff-2d50542e0395-catalog-content\") pod \"community-operators-cg4fb\" (UID: \"dc391fad-c4e5-4cae-8eff-2d50542e0395\") " pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.822190 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b756j\" (UniqueName: \"kubernetes.io/projected/dc391fad-c4e5-4cae-8eff-2d50542e0395-kube-api-access-b756j\") pod \"community-operators-cg4fb\" (UID: \"dc391fad-c4e5-4cae-8eff-2d50542e0395\") " pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.822242 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc391fad-c4e5-4cae-8eff-2d50542e0395-utilities\") pod \"community-operators-cg4fb\" (UID: \"dc391fad-c4e5-4cae-8eff-2d50542e0395\") " pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.822770 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc391fad-c4e5-4cae-8eff-2d50542e0395-catalog-content\") pod \"community-operators-cg4fb\" (UID: \"dc391fad-c4e5-4cae-8eff-2d50542e0395\") " pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.822792 4941 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc391fad-c4e5-4cae-8eff-2d50542e0395-utilities\") pod \"community-operators-cg4fb\" (UID: \"dc391fad-c4e5-4cae-8eff-2d50542e0395\") " pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.847466 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b756j\" (UniqueName: \"kubernetes.io/projected/dc391fad-c4e5-4cae-8eff-2d50542e0395-kube-api-access-b756j\") pod \"community-operators-cg4fb\" (UID: \"dc391fad-c4e5-4cae-8eff-2d50542e0395\") " pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:10:57 crc kubenswrapper[4941]: I1130 07:10:57.956009 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:10:58 crc kubenswrapper[4941]: I1130 07:10:58.441070 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cg4fb"] Nov 30 07:10:59 crc kubenswrapper[4941]: I1130 07:10:59.437409 4941 generic.go:334] "Generic (PLEG): container finished" podID="dc391fad-c4e5-4cae-8eff-2d50542e0395" containerID="a2423ae6e974180dae5741f44920734b9d7b484f92b019661478a57f1f06fcdd" exitCode=0 Nov 30 07:10:59 crc kubenswrapper[4941]: I1130 07:10:59.437501 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg4fb" event={"ID":"dc391fad-c4e5-4cae-8eff-2d50542e0395","Type":"ContainerDied","Data":"a2423ae6e974180dae5741f44920734b9d7b484f92b019661478a57f1f06fcdd"} Nov 30 07:10:59 crc kubenswrapper[4941]: I1130 07:10:59.437760 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg4fb" event={"ID":"dc391fad-c4e5-4cae-8eff-2d50542e0395","Type":"ContainerStarted","Data":"a8c28d73949c5bdd731cb0a5d7655788e1fc8f9f11ea2e23e30ba0907333b0e1"} Nov 30 07:11:00 crc kubenswrapper[4941]: I1130 07:11:00.450196 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg4fb" event={"ID":"dc391fad-c4e5-4cae-8eff-2d50542e0395","Type":"ContainerStarted","Data":"14dd7471172f19fd1b229286d375af379280edbe6de2af1e9044e2da0e0d4581"} Nov 30 07:11:01 crc kubenswrapper[4941]: I1130 07:11:01.463886 4941 generic.go:334] "Generic (PLEG): container finished" podID="dc391fad-c4e5-4cae-8eff-2d50542e0395" containerID="14dd7471172f19fd1b229286d375af379280edbe6de2af1e9044e2da0e0d4581" exitCode=0 Nov 30 07:11:01 crc kubenswrapper[4941]: I1130 07:11:01.463945 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg4fb" event={"ID":"dc391fad-c4e5-4cae-8eff-2d50542e0395","Type":"ContainerDied","Data":"14dd7471172f19fd1b229286d375af379280edbe6de2af1e9044e2da0e0d4581"} Nov 30 07:11:02 crc kubenswrapper[4941]: I1130 07:11:02.473035 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg4fb" event={"ID":"dc391fad-c4e5-4cae-8eff-2d50542e0395","Type":"ContainerStarted","Data":"230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8"} Nov 30 07:11:02 crc kubenswrapper[4941]: I1130 07:11:02.503417 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cg4fb" podStartSLOduration=3.013729247 podStartE2EDuration="5.50338603s" podCreationTimestamp="2025-11-30 07:10:57 +0000 UTC" firstStartedPulling="2025-11-30 07:10:59.440530141 +0000 UTC m=+1480.208701750" 
lastFinishedPulling="2025-11-30 07:11:01.930186914 +0000 UTC m=+1482.698358533" observedRunningTime="2025-11-30 07:11:02.494521999 +0000 UTC m=+1483.262693648" watchObservedRunningTime="2025-11-30 07:11:02.50338603 +0000 UTC m=+1483.271557649" Nov 30 07:11:02 crc kubenswrapper[4941]: I1130 07:11:02.978904 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:11:02 crc kubenswrapper[4941]: I1130 07:11:02.979308 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:11:02 crc kubenswrapper[4941]: I1130 07:11:02.979416 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 07:11:02 crc kubenswrapper[4941]: I1130 07:11:02.980215 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9fa56f4fe3d0a299069614d024c15c66b787f4d645343ae4c789d83f64a98208"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 07:11:02 crc kubenswrapper[4941]: I1130 07:11:02.980291 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://9fa56f4fe3d0a299069614d024c15c66b787f4d645343ae4c789d83f64a98208" gracePeriod=600 Nov 30 07:11:03 crc kubenswrapper[4941]: I1130 07:11:03.482903 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="9fa56f4fe3d0a299069614d024c15c66b787f4d645343ae4c789d83f64a98208" exitCode=0 Nov 30 07:11:03 crc kubenswrapper[4941]: I1130 07:11:03.482970 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"9fa56f4fe3d0a299069614d024c15c66b787f4d645343ae4c789d83f64a98208"} Nov 30 07:11:03 crc kubenswrapper[4941]: I1130 07:11:03.483295 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"} Nov 30 07:11:03 crc kubenswrapper[4941]: I1130 07:11:03.483316 4941 scope.go:117] "RemoveContainer" containerID="03f6ba41bf367eb3ad6a0ca9a42efb4ebf757994a2964d83030fd8e83c2c7d32" Nov 30 07:11:07 crc kubenswrapper[4941]: I1130 07:11:07.957559 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:11:07 crc kubenswrapper[4941]: I1130 07:11:07.958389 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cg4fb" Nov 30 07:11:08 crc kubenswrapper[4941]: 
Nov 30 07:11:08 crc kubenswrapper[4941]: I1130 07:11:08.026037 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cg4fb"
Nov 30 07:11:08 crc kubenswrapper[4941]: I1130 07:11:08.596597 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cg4fb"
Nov 30 07:11:08 crc kubenswrapper[4941]: I1130 07:11:08.662873 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cg4fb"]
Nov 30 07:11:10 crc kubenswrapper[4941]: I1130 07:11:10.552284 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cg4fb" podUID="dc391fad-c4e5-4cae-8eff-2d50542e0395" containerName="registry-server" containerID="cri-o://230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8" gracePeriod=2
Nov 30 07:11:10 crc kubenswrapper[4941]: I1130 07:11:10.962319 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg4fb"
Nov 30 07:11:10 crc kubenswrapper[4941]: I1130 07:11:10.974841 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b756j\" (UniqueName: \"kubernetes.io/projected/dc391fad-c4e5-4cae-8eff-2d50542e0395-kube-api-access-b756j\") pod \"dc391fad-c4e5-4cae-8eff-2d50542e0395\" (UID: \"dc391fad-c4e5-4cae-8eff-2d50542e0395\") "
Nov 30 07:11:10 crc kubenswrapper[4941]: I1130 07:11:10.974890 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc391fad-c4e5-4cae-8eff-2d50542e0395-utilities\") pod \"dc391fad-c4e5-4cae-8eff-2d50542e0395\" (UID: \"dc391fad-c4e5-4cae-8eff-2d50542e0395\") "
Nov 30 07:11:10 crc kubenswrapper[4941]: I1130 07:11:10.974942 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc391fad-c4e5-4cae-8eff-2d50542e0395-catalog-content\") pod \"dc391fad-c4e5-4cae-8eff-2d50542e0395\" (UID: \"dc391fad-c4e5-4cae-8eff-2d50542e0395\") "
Nov 30 07:11:10 crc kubenswrapper[4941]: I1130 07:11:10.982783 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc391fad-c4e5-4cae-8eff-2d50542e0395-utilities" (OuterVolumeSpecName: "utilities") pod "dc391fad-c4e5-4cae-8eff-2d50542e0395" (UID: "dc391fad-c4e5-4cae-8eff-2d50542e0395"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:11:10 crc kubenswrapper[4941]: I1130 07:11:10.989635 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc391fad-c4e5-4cae-8eff-2d50542e0395-kube-api-access-b756j" (OuterVolumeSpecName: "kube-api-access-b756j") pod "dc391fad-c4e5-4cae-8eff-2d50542e0395" (UID: "dc391fad-c4e5-4cae-8eff-2d50542e0395"). InnerVolumeSpecName "kube-api-access-b756j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.080371 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc391fad-c4e5-4cae-8eff-2d50542e0395-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.080445 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b756j\" (UniqueName: \"kubernetes.io/projected/dc391fad-c4e5-4cae-8eff-2d50542e0395-kube-api-access-b756j\") on node \"crc\" DevicePath \"\""
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.115427 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc391fad-c4e5-4cae-8eff-2d50542e0395-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc391fad-c4e5-4cae-8eff-2d50542e0395" (UID: "dc391fad-c4e5-4cae-8eff-2d50542e0395"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.181262 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc391fad-c4e5-4cae-8eff-2d50542e0395-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.564433 4941 generic.go:334] "Generic (PLEG): container finished" podID="dc391fad-c4e5-4cae-8eff-2d50542e0395" containerID="230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8" exitCode=0
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.564471 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cg4fb"
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.564489 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg4fb" event={"ID":"dc391fad-c4e5-4cae-8eff-2d50542e0395","Type":"ContainerDied","Data":"230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8"}
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.564568 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cg4fb" event={"ID":"dc391fad-c4e5-4cae-8eff-2d50542e0395","Type":"ContainerDied","Data":"a8c28d73949c5bdd731cb0a5d7655788e1fc8f9f11ea2e23e30ba0907333b0e1"}
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.564594 4941 scope.go:117] "RemoveContainer" containerID="230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8"
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.593547 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cg4fb"]
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.597868 4941 scope.go:117] "RemoveContainer" containerID="14dd7471172f19fd1b229286d375af379280edbe6de2af1e9044e2da0e0d4581"
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.598762 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cg4fb"]
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.624962 4941 scope.go:117] "RemoveContainer" containerID="a2423ae6e974180dae5741f44920734b9d7b484f92b019661478a57f1f06fcdd"
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.650656 4941 scope.go:117] "RemoveContainer" containerID="230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8"
Nov 30 07:11:11 crc kubenswrapper[4941]: E1130 07:11:11.651935 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8\": container with ID starting with 230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8 not found: ID does not exist" containerID="230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8"
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.652003 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8"} err="failed to get container status \"230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8\": rpc error: code = NotFound desc = could not find container \"230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8\": container with ID starting with 230dccb8621c7e872a1f04d83ae6528bf66fe67b2e5c8f9a5eb442759dc114d8 not found: ID does not exist"
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.652061 4941 scope.go:117] "RemoveContainer" containerID="14dd7471172f19fd1b229286d375af379280edbe6de2af1e9044e2da0e0d4581"
Nov 30 07:11:11 crc kubenswrapper[4941]: E1130 07:11:11.652632 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14dd7471172f19fd1b229286d375af379280edbe6de2af1e9044e2da0e0d4581\": container with ID starting with 14dd7471172f19fd1b229286d375af379280edbe6de2af1e9044e2da0e0d4581 not found: ID does not exist" containerID="14dd7471172f19fd1b229286d375af379280edbe6de2af1e9044e2da0e0d4581"
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.652678 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14dd7471172f19fd1b229286d375af379280edbe6de2af1e9044e2da0e0d4581"} err="failed to get container status \"14dd7471172f19fd1b229286d375af379280edbe6de2af1e9044e2da0e0d4581\": rpc error: code = NotFound desc = could not find container \"14dd7471172f19fd1b229286d375af379280edbe6de2af1e9044e2da0e0d4581\": container with ID starting with 14dd7471172f19fd1b229286d375af379280edbe6de2af1e9044e2da0e0d4581 not found: ID does not exist"
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.652709 4941 scope.go:117] "RemoveContainer" containerID="a2423ae6e974180dae5741f44920734b9d7b484f92b019661478a57f1f06fcdd"
Nov 30 07:11:11 crc kubenswrapper[4941]: E1130 07:11:11.653005 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2423ae6e974180dae5741f44920734b9d7b484f92b019661478a57f1f06fcdd\": container with ID starting with a2423ae6e974180dae5741f44920734b9d7b484f92b019661478a57f1f06fcdd not found: ID does not exist" containerID="a2423ae6e974180dae5741f44920734b9d7b484f92b019661478a57f1f06fcdd"
Nov 30 07:11:11 crc kubenswrapper[4941]: I1130 07:11:11.653040 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2423ae6e974180dae5741f44920734b9d7b484f92b019661478a57f1f06fcdd"} err="failed to get container status \"a2423ae6e974180dae5741f44920734b9d7b484f92b019661478a57f1f06fcdd\": rpc error: code = NotFound desc = could not find container \"a2423ae6e974180dae5741f44920734b9d7b484f92b019661478a57f1f06fcdd\": container with ID starting with a2423ae6e974180dae5741f44920734b9d7b484f92b019661478a57f1f06fcdd not found: ID does not exist"
Nov 30 07:11:13 crc kubenswrapper[4941]: I1130 07:11:13.539820 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc391fad-c4e5-4cae-8eff-2d50542e0395" path="/var/lib/kubelet/pods/dc391fad-c4e5-4cae-8eff-2d50542e0395/volumes"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.400572 4941 scope.go:117] "RemoveContainer" containerID="a9f3b9a4af1954fccf875c6a505fd0e6cd606fbf36e441a102a4d4d71fd40774"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.439421 4941 scope.go:117] "RemoveContainer" containerID="6b905a1a1127e47f5289ba8b21eee56a00d5a168712f6b2da0f2d56730a1e613"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.490092 4941 scope.go:117] "RemoveContainer" containerID="7e60a0a05344af48ff485b4a07f2c2c2df39e813faeed0cd0ad52d95c70eb25d"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.529797 4941 scope.go:117] "RemoveContainer" containerID="698a808256a2194b65e6223885ab5d564e66738858d71ad22d9e0b5a59262e12"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.548255 4941 scope.go:117] "RemoveContainer" containerID="466cbbd6f33788f86483c8f7ea35221c9274dc72718366bea0cfb6536d39f3c0"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.578039 4941 scope.go:117] "RemoveContainer" containerID="899456bfa6431f142919e5f8d16f0caeb94ab9bc705c9185b958c225ad7fdbcd"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.598413 4941 scope.go:117] "RemoveContainer" containerID="267ed5a6dd6ec77d04a9c5ce03a3cd0327ea124fdba4126b565c34b34b0d6014"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.617557 4941 scope.go:117] "RemoveContainer" containerID="d1444176c7f62858717bcdb5f8821c6cf482c99c0b1bac70adccf0b14c0cde7c"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.654480 4941 scope.go:117] "RemoveContainer" containerID="b1f933b51ff93fa7a1c8eb2e271d814e0c9d46c1eb93b1450edfc9e30a2294bd"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.671738 4941 scope.go:117] "RemoveContainer" containerID="70c42b65522d407746ecc1913faef76bac0918ec07d98dfe0a1642f8dcf3e157"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.687471 4941 scope.go:117] "RemoveContainer" containerID="36b2bd1a497fffdb80bcceec0ebd1e8f6f365d640568c939adc2448b434c62b9"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.714613 4941 scope.go:117] "RemoveContainer" containerID="913c25abf91aa4ed662ae2fe67e807f82a53a365f35a622ebcc77a8a04941143"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.739463 4941 scope.go:117] "RemoveContainer" containerID="d8144984463211b224e6bafdd6518e9366676352c73717b35407f531e2e1d134"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.755738 4941 scope.go:117] "RemoveContainer" containerID="b0c2c7fa1005f08c39968a1a685ca3212988968efd0e99b914c94cc6cf534f58"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.793607 4941 scope.go:117] "RemoveContainer" containerID="2fbb2c22c3103ac186df3be80937e50d5dbe7ab19422754df8bed50cf8d400b3"
Nov 30 07:11:36 crc kubenswrapper[4941]: I1130 07:11:36.819475 4941 scope.go:117] "RemoveContainer" containerID="a4e701153073d1d552b85184ac3a38256bd5f4d62dfd4daead28e74cf142a449"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.119374 4941 scope.go:117] "RemoveContainer" containerID="a450fc13d9cba17de5b9a5145a69dd69674600063f71ac28c96e01143111704a"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.153482 4941 scope.go:117] "RemoveContainer" containerID="305c3aba8af09bc4d0a3d63208f7e2949135c2ee18761aac46287cf1068feab1"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.199902 4941 scope.go:117] "RemoveContainer" containerID="9fbbb85ad30a83b7456fb44a31c2209264c5e153a674196f4ae8765e3a9deed8"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.259286 4941 scope.go:117] "RemoveContainer" containerID="26ada5d7acc3a461a8a330fae6ff00d6ad1801a8caa3a433600d9307e3c1a50d"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.282415 4941 scope.go:117] "RemoveContainer" containerID="aeb000c0386dce47c7a56b3872cfb39b10d750b9a481ebdd2c575d97c0bbecbd"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.304500 4941 scope.go:117] "RemoveContainer" containerID="f07d4fed2519239f0e4fb63e2104fd272dece2ded2e1def806ae733a19ed8d77"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.338227 4941 scope.go:117] "RemoveContainer" containerID="8341e11286820a4a80b6cab481b2607c6de18d38c5dd607d7d9d36f09e505c21"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.367944 4941 scope.go:117] "RemoveContainer" containerID="a89fcf084f19b692e7cb17f1e0ea67edc9149cb997bef9b438b46193fdbaca60"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.396344 4941 scope.go:117] "RemoveContainer" containerID="fc82cd08bae8260c63d177cb520a3ad485ad5481cded0679001673d3f6da3c35"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.416218 4941 scope.go:117] "RemoveContainer" containerID="539d89a15edc70dc4c19cc2280d7df494d363b3461a1f1f53b5b2c3c6f64de13"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.438464 4941 scope.go:117] "RemoveContainer" containerID="a3e75aa9880eaa34a2042e2ff3704bcab240a15c359e560eb07f1dbc92fb900d"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.459744 4941 scope.go:117] "RemoveContainer" containerID="a96ce6ca3db082f07bc30a85b2dd3ed276f669d63c74e2397c8b08ebfa4f983e"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.491988 4941 scope.go:117] "RemoveContainer" containerID="901debcd5ec58b9fd628099c8ca6fab3d5432f5cf3c027e27b344f81cf8d7260"
Nov 30 07:12:37 crc kubenswrapper[4941]: I1130 07:12:37.512017 4941 scope.go:117] "RemoveContainer" containerID="009377a60fa215ae8250c38bc50dca1c5bfb321e79ea6ee7dd44dd9fbdcf7b42"
Nov 30 07:13:32 crc kubenswrapper[4941]: I1130 07:13:32.979474 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 07:13:32 crc kubenswrapper[4941]: I1130 07:13:32.980541 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 07:13:37 crc kubenswrapper[4941]: I1130 07:13:37.698665 4941 scope.go:117] "RemoveContainer" containerID="54724f9e7b9cb397a6dbf2bf4b7b305271ebdeef511fa6d99df016ffce4d170d"
Nov 30 07:13:37 crc kubenswrapper[4941]: I1130 07:13:37.743104 4941 scope.go:117] "RemoveContainer" containerID="d60b2e93cd8fe47985ea3c022d6fe900447816293ccb970fad6d8c2750bd7db3"
Nov 30 07:13:37 crc kubenswrapper[4941]: I1130 07:13:37.827484 4941 scope.go:117] "RemoveContainer" containerID="0c5d9a8e5bf04936fcd7fef9a3fbc0f88f628cb0ea4146cdb0d370970a42b1f4"
Nov 30 07:13:37 crc kubenswrapper[4941]: I1130 07:13:37.876852 4941 scope.go:117] "RemoveContainer" containerID="96e02e36152e3e77bc3bcb37965f0c42f78aaeec0dbbaa72be4ad8497a927704"
Nov 30 07:13:37 crc kubenswrapper[4941]: I1130 07:13:37.906684 4941 scope.go:117] "RemoveContainer" containerID="e3ee661e169cba152a8653fbfa50300784a65ee412f2fc53dfa56f11c302721e"
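Note the cadence of the "RemoveContainer" bursts above: roughly once a minute (07:10:36, 07:11:36, 07:12:37, 07:13:37, ...). That is consistent with a periodic container garbage-collection pass; a minimal sketch of the loop shape, with the 1-minute period matching the kubelet's default GC interval but treated here as an assumption:

package main

import (
	"fmt"
	"time"
)

func main() {
	ticker := time.NewTicker(time.Minute)
	defer ticker.Stop()
	for t := range ticker.C {
		// A real pass would list exited containers via the runtime and
		// delete those beyond the retention policy, which is what produces
		// the batches of "RemoveContainer" lines above.
		fmt.Println("container GC pass at", t.Format(time.RFC3339))
	}
}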
containerID="e3ee661e169cba152a8653fbfa50300784a65ee412f2fc53dfa56f11c302721e" Nov 30 07:14:02 crc kubenswrapper[4941]: I1130 07:14:02.979422 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:14:02 crc kubenswrapper[4941]: I1130 07:14:02.980213 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:14:32 crc kubenswrapper[4941]: I1130 07:14:32.978725 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:14:32 crc kubenswrapper[4941]: I1130 07:14:32.979253 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:14:32 crc kubenswrapper[4941]: I1130 07:14:32.979312 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 07:14:32 crc kubenswrapper[4941]: I1130 07:14:32.980102 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 07:14:32 crc kubenswrapper[4941]: I1130 07:14:32.980168 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1" gracePeriod=600 Nov 30 07:14:33 crc kubenswrapper[4941]: E1130 07:14:33.105457 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:14:33 crc kubenswrapper[4941]: I1130 07:14:33.568641 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1" exitCode=0 Nov 30 07:14:33 crc kubenswrapper[4941]: I1130 07:14:33.568707 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" 
event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"} Nov 30 07:14:33 crc kubenswrapper[4941]: I1130 07:14:33.568785 4941 scope.go:117] "RemoveContainer" containerID="9fa56f4fe3d0a299069614d024c15c66b787f4d645343ae4c789d83f64a98208" Nov 30 07:14:33 crc kubenswrapper[4941]: I1130 07:14:33.569459 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1" Nov 30 07:14:33 crc kubenswrapper[4941]: E1130 07:14:33.569793 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:14:38 crc kubenswrapper[4941]: I1130 07:14:38.063605 4941 scope.go:117] "RemoveContainer" containerID="fd044d0caf33128dc424d871cee00e10115231eb3535e2249f4d39b8c3c04af1" Nov 30 07:14:38 crc kubenswrapper[4941]: I1130 07:14:38.098860 4941 scope.go:117] "RemoveContainer" containerID="b105b8d565a21c51a0e3819153e09f1faf3ad8232f7dd86d2a96a1c95531e6bf" Nov 30 07:14:38 crc kubenswrapper[4941]: I1130 07:14:38.168992 4941 scope.go:117] "RemoveContainer" containerID="199b3e26fc0f6545467dc0bfc386bbb742110c36e2dd4105695b124948c0d840" Nov 30 07:14:38 crc kubenswrapper[4941]: I1130 07:14:38.195094 4941 scope.go:117] "RemoveContainer" containerID="8e135084f5429afb87f87c92443142f7bdd20c8c7f175058d25d225c984bf164" Nov 30 07:14:38 crc kubenswrapper[4941]: I1130 07:14:38.212575 4941 scope.go:117] "RemoveContainer" containerID="b77426d5a27eb3609eea859e72da0ab57c475fc3ec836c9ba0a47eb046b015a1" Nov 30 07:14:38 crc kubenswrapper[4941]: I1130 07:14:38.229696 4941 scope.go:117] "RemoveContainer" containerID="a9753f5333e8cf6a6a6e8a5b6bb203dcea93cf7b8757984a96698adeffa9d226" Nov 30 07:14:38 crc kubenswrapper[4941]: I1130 07:14:38.246939 4941 scope.go:117] "RemoveContainer" containerID="80eaa1c0bcf74c714669821c2f8c3b37c333106eca38a0ef24d09c0faa3ec4c0" Nov 30 07:14:48 crc kubenswrapper[4941]: I1130 07:14:48.522158 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1" Nov 30 07:14:48 crc kubenswrapper[4941]: E1130 07:14:48.523297 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.162809 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd"] Nov 30 07:15:00 crc kubenswrapper[4941]: E1130 07:15:00.163996 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc391fad-c4e5-4cae-8eff-2d50542e0395" containerName="extract-content" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.164014 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc391fad-c4e5-4cae-8eff-2d50542e0395" containerName="extract-content" Nov 30 07:15:00 crc kubenswrapper[4941]: 
E1130 07:15:00.164035 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc391fad-c4e5-4cae-8eff-2d50542e0395" containerName="extract-utilities" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.164043 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc391fad-c4e5-4cae-8eff-2d50542e0395" containerName="extract-utilities" Nov 30 07:15:00 crc kubenswrapper[4941]: E1130 07:15:00.164079 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc391fad-c4e5-4cae-8eff-2d50542e0395" containerName="registry-server" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.164090 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc391fad-c4e5-4cae-8eff-2d50542e0395" containerName="registry-server" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.164304 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc391fad-c4e5-4cae-8eff-2d50542e0395" containerName="registry-server" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.164989 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.167240 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.167623 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.177582 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd"] Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.240631 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d26ef129-af80-41e0-9457-f05a65547495-secret-volume\") pod \"collect-profiles-29408115-lntnd\" (UID: \"d26ef129-af80-41e0-9457-f05a65547495\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.240683 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d26ef129-af80-41e0-9457-f05a65547495-config-volume\") pod \"collect-profiles-29408115-lntnd\" (UID: \"d26ef129-af80-41e0-9457-f05a65547495\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.240704 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvvbx\" (UniqueName: \"kubernetes.io/projected/d26ef129-af80-41e0-9457-f05a65547495-kube-api-access-fvvbx\") pod \"collect-profiles-29408115-lntnd\" (UID: \"d26ef129-af80-41e0-9457-f05a65547495\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.342244 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d26ef129-af80-41e0-9457-f05a65547495-secret-volume\") pod \"collect-profiles-29408115-lntnd\" (UID: \"d26ef129-af80-41e0-9457-f05a65547495\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" Nov 30 07:15:00 crc 
kubenswrapper[4941]: I1130 07:15:00.342587 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d26ef129-af80-41e0-9457-f05a65547495-config-volume\") pod \"collect-profiles-29408115-lntnd\" (UID: \"d26ef129-af80-41e0-9457-f05a65547495\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.342767 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvvbx\" (UniqueName: \"kubernetes.io/projected/d26ef129-af80-41e0-9457-f05a65547495-kube-api-access-fvvbx\") pod \"collect-profiles-29408115-lntnd\" (UID: \"d26ef129-af80-41e0-9457-f05a65547495\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.343624 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d26ef129-af80-41e0-9457-f05a65547495-config-volume\") pod \"collect-profiles-29408115-lntnd\" (UID: \"d26ef129-af80-41e0-9457-f05a65547495\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.355083 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d26ef129-af80-41e0-9457-f05a65547495-secret-volume\") pod \"collect-profiles-29408115-lntnd\" (UID: \"d26ef129-af80-41e0-9457-f05a65547495\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.357432 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvvbx\" (UniqueName: \"kubernetes.io/projected/d26ef129-af80-41e0-9457-f05a65547495-kube-api-access-fvvbx\") pod \"collect-profiles-29408115-lntnd\" (UID: \"d26ef129-af80-41e0-9457-f05a65547495\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.494479 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" Nov 30 07:15:00 crc kubenswrapper[4941]: I1130 07:15:00.954539 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd"] Nov 30 07:15:01 crc kubenswrapper[4941]: I1130 07:15:01.809654 4941 generic.go:334] "Generic (PLEG): container finished" podID="d26ef129-af80-41e0-9457-f05a65547495" containerID="8411c669ce5a701f951a585beb3e720ee0a47eb60e9d73b9f1a23d040028d327" exitCode=0 Nov 30 07:15:01 crc kubenswrapper[4941]: I1130 07:15:01.809694 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" event={"ID":"d26ef129-af80-41e0-9457-f05a65547495","Type":"ContainerDied","Data":"8411c669ce5a701f951a585beb3e720ee0a47eb60e9d73b9f1a23d040028d327"} Nov 30 07:15:01 crc kubenswrapper[4941]: I1130 07:15:01.809720 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" event={"ID":"d26ef129-af80-41e0-9457-f05a65547495","Type":"ContainerStarted","Data":"6390c4cce474763c0769ef5d3bc3d37a14c7d445dd07d7cee2ced29f3bcd4d52"} Nov 30 07:15:02 crc kubenswrapper[4941]: I1130 07:15:02.522313 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1" Nov 30 07:15:02 crc kubenswrapper[4941]: E1130 07:15:02.522862 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.038849 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.077398 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d26ef129-af80-41e0-9457-f05a65547495-config-volume\") pod \"d26ef129-af80-41e0-9457-f05a65547495\" (UID: \"d26ef129-af80-41e0-9457-f05a65547495\") " Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.077450 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvvbx\" (UniqueName: \"kubernetes.io/projected/d26ef129-af80-41e0-9457-f05a65547495-kube-api-access-fvvbx\") pod \"d26ef129-af80-41e0-9457-f05a65547495\" (UID: \"d26ef129-af80-41e0-9457-f05a65547495\") " Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.077507 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d26ef129-af80-41e0-9457-f05a65547495-secret-volume\") pod \"d26ef129-af80-41e0-9457-f05a65547495\" (UID: \"d26ef129-af80-41e0-9457-f05a65547495\") " Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.078396 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d26ef129-af80-41e0-9457-f05a65547495-config-volume" (OuterVolumeSpecName: "config-volume") pod "d26ef129-af80-41e0-9457-f05a65547495" (UID: "d26ef129-af80-41e0-9457-f05a65547495"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.083857 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d26ef129-af80-41e0-9457-f05a65547495-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d26ef129-af80-41e0-9457-f05a65547495" (UID: "d26ef129-af80-41e0-9457-f05a65547495"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.084589 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d26ef129-af80-41e0-9457-f05a65547495-kube-api-access-fvvbx" (OuterVolumeSpecName: "kube-api-access-fvvbx") pod "d26ef129-af80-41e0-9457-f05a65547495" (UID: "d26ef129-af80-41e0-9457-f05a65547495"). InnerVolumeSpecName "kube-api-access-fvvbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.178456 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d26ef129-af80-41e0-9457-f05a65547495-config-volume\") on node \"crc\" DevicePath \"\"" Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.178483 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvvbx\" (UniqueName: \"kubernetes.io/projected/d26ef129-af80-41e0-9457-f05a65547495-kube-api-access-fvvbx\") on node \"crc\" DevicePath \"\"" Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.178494 4941 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d26ef129-af80-41e0-9457-f05a65547495-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.825036 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd" event={"ID":"d26ef129-af80-41e0-9457-f05a65547495","Type":"ContainerDied","Data":"6390c4cce474763c0769ef5d3bc3d37a14c7d445dd07d7cee2ced29f3bcd4d52"} Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.825085 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6390c4cce474763c0769ef5d3bc3d37a14c7d445dd07d7cee2ced29f3bcd4d52" Nov 30 07:15:03 crc kubenswrapper[4941]: I1130 07:15:03.825077 4941 util.go:48] "No ready sandbox for pod can be found. 
Nov 30 07:15:13 crc kubenswrapper[4941]: I1130 07:15:13.522130 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:15:13 crc kubenswrapper[4941]: E1130 07:15:13.523185 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:15:24 crc kubenswrapper[4941]: I1130 07:15:24.522376 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:15:24 crc kubenswrapper[4941]: E1130 07:15:24.523431 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:15:36 crc kubenswrapper[4941]: I1130 07:15:36.522048 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:15:36 crc kubenswrapper[4941]: E1130 07:15:36.523275 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:15:38 crc kubenswrapper[4941]: I1130 07:15:38.345036 4941 scope.go:117] "RemoveContainer" containerID="3fbce1fd8d0f846aa7dbe3303e0c31d6b5075eef49ab34fff3344d334300b079"
Nov 30 07:15:38 crc kubenswrapper[4941]: I1130 07:15:38.365068 4941 scope.go:117] "RemoveContainer" containerID="79080395206e181e7e0cedceeca71a93fc5f49de3ec9b01c6ffadf8a5877a042"
Nov 30 07:15:51 crc kubenswrapper[4941]: I1130 07:15:51.522684 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:15:51 crc kubenswrapper[4941]: E1130 07:15:51.523414 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:16:05 crc kubenswrapper[4941]: I1130 07:16:05.522122 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:16:05 crc kubenswrapper[4941]: E1130 07:16:05.523064 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:16:19 crc kubenswrapper[4941]: I1130 07:16:19.530990 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:16:19 crc kubenswrapper[4941]: E1130 07:16:19.534518 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:16:32 crc kubenswrapper[4941]: I1130 07:16:32.522032 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:16:32 crc kubenswrapper[4941]: E1130 07:16:32.523302 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:16:46 crc kubenswrapper[4941]: I1130 07:16:46.522289 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:16:46 crc kubenswrapper[4941]: E1130 07:16:46.523098 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:16:57 crc kubenswrapper[4941]: I1130 07:16:57.522207 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:16:57 crc kubenswrapper[4941]: E1130 07:16:57.523155 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:17:12 crc kubenswrapper[4941]: I1130 07:17:12.522195 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:17:12 crc kubenswrapper[4941]: E1130 07:17:12.523309 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:17:25 crc kubenswrapper[4941]: I1130 07:17:25.521716 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:17:25 crc kubenswrapper[4941]: E1130 07:17:25.523079 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:17:37 crc kubenswrapper[4941]: I1130 07:17:37.522265 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:17:37 crc kubenswrapper[4941]: E1130 07:17:37.523128 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:17:48 crc kubenswrapper[4941]: I1130 07:17:48.522574 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:17:48 crc kubenswrapper[4941]: E1130 07:17:48.523280 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:17:59 crc kubenswrapper[4941]: I1130 07:17:59.527497 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:17:59 crc kubenswrapper[4941]: E1130 07:17:59.528782 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:18:14 crc kubenswrapper[4941]: I1130 07:18:14.521593 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1"
Nov 30 07:18:14 crc kubenswrapper[4941]: E1130 07:18:14.522565 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.728610 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t4pgn"]
Nov 30 07:18:18 crc kubenswrapper[4941]: E1130 07:18:18.729506 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d26ef129-af80-41e0-9457-f05a65547495" containerName="collect-profiles"
Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.729529 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d26ef129-af80-41e0-9457-f05a65547495" containerName="collect-profiles"
Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.729841 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d26ef129-af80-41e0-9457-f05a65547495" containerName="collect-profiles"
Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.731694 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t4pgn"
Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.758703 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t4pgn"]
Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.765180 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6twk7\" (UniqueName: \"kubernetes.io/projected/1faf57c1-e7e4-4ffc-b825-e305253ee49f-kube-api-access-6twk7\") pod \"certified-operators-t4pgn\" (UID: \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\") " pod="openshift-marketplace/certified-operators-t4pgn"
Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.765217 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1faf57c1-e7e4-4ffc-b825-e305253ee49f-utilities\") pod \"certified-operators-t4pgn\" (UID: \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\") " pod="openshift-marketplace/certified-operators-t4pgn"
Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.765255 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1faf57c1-e7e4-4ffc-b825-e305253ee49f-catalog-content\") pod \"certified-operators-t4pgn\" (UID: \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\") " pod="openshift-marketplace/certified-operators-t4pgn"
Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.866064 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6twk7\" (UniqueName: \"kubernetes.io/projected/1faf57c1-e7e4-4ffc-b825-e305253ee49f-kube-api-access-6twk7\") pod \"certified-operators-t4pgn\" (UID: \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\") " pod="openshift-marketplace/certified-operators-t4pgn"
Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.866106 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1faf57c1-e7e4-4ffc-b825-e305253ee49f-utilities\") pod \"certified-operators-t4pgn\" (UID: \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\") " pod="openshift-marketplace/certified-operators-t4pgn"
Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.866138 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1faf57c1-e7e4-4ffc-b825-e305253ee49f-catalog-content\") pod \"certified-operators-t4pgn\" (UID: \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\") " pod="openshift-marketplace/certified-operators-t4pgn"
Nov 30 07:18:18 crc kubenswrapper[4941]:
I1130 07:18:18.866677 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1faf57c1-e7e4-4ffc-b825-e305253ee49f-catalog-content\") pod \"certified-operators-t4pgn\" (UID: \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\") " pod="openshift-marketplace/certified-operators-t4pgn" Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.867064 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1faf57c1-e7e4-4ffc-b825-e305253ee49f-utilities\") pod \"certified-operators-t4pgn\" (UID: \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\") " pod="openshift-marketplace/certified-operators-t4pgn" Nov 30 07:18:18 crc kubenswrapper[4941]: I1130 07:18:18.893686 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6twk7\" (UniqueName: \"kubernetes.io/projected/1faf57c1-e7e4-4ffc-b825-e305253ee49f-kube-api-access-6twk7\") pod \"certified-operators-t4pgn\" (UID: \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\") " pod="openshift-marketplace/certified-operators-t4pgn" Nov 30 07:18:19 crc kubenswrapper[4941]: I1130 07:18:19.076602 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t4pgn" Nov 30 07:18:19 crc kubenswrapper[4941]: I1130 07:18:19.549855 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t4pgn"] Nov 30 07:18:19 crc kubenswrapper[4941]: I1130 07:18:19.752239 4941 generic.go:334] "Generic (PLEG): container finished" podID="1faf57c1-e7e4-4ffc-b825-e305253ee49f" containerID="87d9501875606af49646cd84a6b0a2ad94fdd336d9805ed87cded6556397c926" exitCode=0 Nov 30 07:18:19 crc kubenswrapper[4941]: I1130 07:18:19.752279 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t4pgn" event={"ID":"1faf57c1-e7e4-4ffc-b825-e305253ee49f","Type":"ContainerDied","Data":"87d9501875606af49646cd84a6b0a2ad94fdd336d9805ed87cded6556397c926"} Nov 30 07:18:19 crc kubenswrapper[4941]: I1130 07:18:19.752318 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t4pgn" event={"ID":"1faf57c1-e7e4-4ffc-b825-e305253ee49f","Type":"ContainerStarted","Data":"390c02bdeee1ce440973cba15f8dd76358418145f6b160252b410e136435cafc"} Nov 30 07:18:19 crc kubenswrapper[4941]: I1130 07:18:19.754891 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 07:18:20 crc kubenswrapper[4941]: I1130 07:18:20.763816 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t4pgn" event={"ID":"1faf57c1-e7e4-4ffc-b825-e305253ee49f","Type":"ContainerStarted","Data":"142e7208f952fa8f031461672374bbbe3bca50c32a609763f2b40ed2d2567af9"} Nov 30 07:18:21 crc kubenswrapper[4941]: I1130 07:18:21.771658 4941 generic.go:334] "Generic (PLEG): container finished" podID="1faf57c1-e7e4-4ffc-b825-e305253ee49f" containerID="142e7208f952fa8f031461672374bbbe3bca50c32a609763f2b40ed2d2567af9" exitCode=0 Nov 30 07:18:21 crc kubenswrapper[4941]: I1130 07:18:21.771706 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t4pgn" event={"ID":"1faf57c1-e7e4-4ffc-b825-e305253ee49f","Type":"ContainerDied","Data":"142e7208f952fa8f031461672374bbbe3bca50c32a609763f2b40ed2d2567af9"} Nov 30 07:18:22 crc kubenswrapper[4941]: I1130 07:18:22.779695 4941 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-t4pgn" event={"ID":"1faf57c1-e7e4-4ffc-b825-e305253ee49f","Type":"ContainerStarted","Data":"5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6"} Nov 30 07:18:22 crc kubenswrapper[4941]: I1130 07:18:22.799809 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t4pgn" podStartSLOduration=2.297425421 podStartE2EDuration="4.799789885s" podCreationTimestamp="2025-11-30 07:18:18 +0000 UTC" firstStartedPulling="2025-11-30 07:18:19.753439905 +0000 UTC m=+1920.521611514" lastFinishedPulling="2025-11-30 07:18:22.255804369 +0000 UTC m=+1923.023975978" observedRunningTime="2025-11-30 07:18:22.792486799 +0000 UTC m=+1923.560658398" watchObservedRunningTime="2025-11-30 07:18:22.799789885 +0000 UTC m=+1923.567961494" Nov 30 07:18:26 crc kubenswrapper[4941]: I1130 07:18:26.521725 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1" Nov 30 07:18:26 crc kubenswrapper[4941]: E1130 07:18:26.522407 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:18:29 crc kubenswrapper[4941]: I1130 07:18:29.076935 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t4pgn" Nov 30 07:18:29 crc kubenswrapper[4941]: I1130 07:18:29.077657 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t4pgn" Nov 30 07:18:29 crc kubenswrapper[4941]: I1130 07:18:29.167861 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t4pgn" Nov 30 07:18:29 crc kubenswrapper[4941]: I1130 07:18:29.939477 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t4pgn" Nov 30 07:18:31 crc kubenswrapper[4941]: I1130 07:18:31.719435 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t4pgn"] Nov 30 07:18:32 crc kubenswrapper[4941]: I1130 07:18:32.887840 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t4pgn" podUID="1faf57c1-e7e4-4ffc-b825-e305253ee49f" containerName="registry-server" containerID="cri-o://5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6" gracePeriod=2 Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.832601 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t4pgn" Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.901393 4941 generic.go:334] "Generic (PLEG): container finished" podID="1faf57c1-e7e4-4ffc-b825-e305253ee49f" containerID="5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6" exitCode=0 Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.901447 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t4pgn" event={"ID":"1faf57c1-e7e4-4ffc-b825-e305253ee49f","Type":"ContainerDied","Data":"5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6"} Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.901476 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t4pgn" event={"ID":"1faf57c1-e7e4-4ffc-b825-e305253ee49f","Type":"ContainerDied","Data":"390c02bdeee1ce440973cba15f8dd76358418145f6b160252b410e136435cafc"} Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.901474 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t4pgn" Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.901560 4941 scope.go:117] "RemoveContainer" containerID="5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6" Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.926102 4941 scope.go:117] "RemoveContainer" containerID="142e7208f952fa8f031461672374bbbe3bca50c32a609763f2b40ed2d2567af9" Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.938848 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6twk7\" (UniqueName: \"kubernetes.io/projected/1faf57c1-e7e4-4ffc-b825-e305253ee49f-kube-api-access-6twk7\") pod \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\" (UID: \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\") " Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.938937 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1faf57c1-e7e4-4ffc-b825-e305253ee49f-catalog-content\") pod \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\" (UID: \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\") " Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.939169 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1faf57c1-e7e4-4ffc-b825-e305253ee49f-utilities\") pod \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\" (UID: \"1faf57c1-e7e4-4ffc-b825-e305253ee49f\") " Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.940528 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1faf57c1-e7e4-4ffc-b825-e305253ee49f-utilities" (OuterVolumeSpecName: "utilities") pod "1faf57c1-e7e4-4ffc-b825-e305253ee49f" (UID: "1faf57c1-e7e4-4ffc-b825-e305253ee49f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.942801 4941 scope.go:117] "RemoveContainer" containerID="87d9501875606af49646cd84a6b0a2ad94fdd336d9805ed87cded6556397c926" Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.947622 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1faf57c1-e7e4-4ffc-b825-e305253ee49f-kube-api-access-6twk7" (OuterVolumeSpecName: "kube-api-access-6twk7") pod "1faf57c1-e7e4-4ffc-b825-e305253ee49f" (UID: "1faf57c1-e7e4-4ffc-b825-e305253ee49f"). InnerVolumeSpecName "kube-api-access-6twk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.996806 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1faf57c1-e7e4-4ffc-b825-e305253ee49f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1faf57c1-e7e4-4ffc-b825-e305253ee49f" (UID: "1faf57c1-e7e4-4ffc-b825-e305253ee49f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.997947 4941 scope.go:117] "RemoveContainer" containerID="5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6" Nov 30 07:18:33 crc kubenswrapper[4941]: E1130 07:18:33.998929 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6\": container with ID starting with 5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6 not found: ID does not exist" containerID="5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6" Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.998967 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6"} err="failed to get container status \"5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6\": rpc error: code = NotFound desc = could not find container \"5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6\": container with ID starting with 5c3139b0d05824b82f18b185ffc74cb7aebab5d592df95c7f16e08bbd98fb7b6 not found: ID does not exist" Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.998990 4941 scope.go:117] "RemoveContainer" containerID="142e7208f952fa8f031461672374bbbe3bca50c32a609763f2b40ed2d2567af9" Nov 30 07:18:33 crc kubenswrapper[4941]: E1130 07:18:33.999669 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"142e7208f952fa8f031461672374bbbe3bca50c32a609763f2b40ed2d2567af9\": container with ID starting with 142e7208f952fa8f031461672374bbbe3bca50c32a609763f2b40ed2d2567af9 not found: ID does not exist" containerID="142e7208f952fa8f031461672374bbbe3bca50c32a609763f2b40ed2d2567af9" Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.999710 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"142e7208f952fa8f031461672374bbbe3bca50c32a609763f2b40ed2d2567af9"} err="failed to get container status \"142e7208f952fa8f031461672374bbbe3bca50c32a609763f2b40ed2d2567af9\": rpc error: code = NotFound desc = could not find container \"142e7208f952fa8f031461672374bbbe3bca50c32a609763f2b40ed2d2567af9\": container with ID starting with 
142e7208f952fa8f031461672374bbbe3bca50c32a609763f2b40ed2d2567af9 not found: ID does not exist" Nov 30 07:18:33 crc kubenswrapper[4941]: I1130 07:18:33.999739 4941 scope.go:117] "RemoveContainer" containerID="87d9501875606af49646cd84a6b0a2ad94fdd336d9805ed87cded6556397c926" Nov 30 07:18:34 crc kubenswrapper[4941]: E1130 07:18:34.000093 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87d9501875606af49646cd84a6b0a2ad94fdd336d9805ed87cded6556397c926\": container with ID starting with 87d9501875606af49646cd84a6b0a2ad94fdd336d9805ed87cded6556397c926 not found: ID does not exist" containerID="87d9501875606af49646cd84a6b0a2ad94fdd336d9805ed87cded6556397c926" Nov 30 07:18:34 crc kubenswrapper[4941]: I1130 07:18:34.000123 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87d9501875606af49646cd84a6b0a2ad94fdd336d9805ed87cded6556397c926"} err="failed to get container status \"87d9501875606af49646cd84a6b0a2ad94fdd336d9805ed87cded6556397c926\": rpc error: code = NotFound desc = could not find container \"87d9501875606af49646cd84a6b0a2ad94fdd336d9805ed87cded6556397c926\": container with ID starting with 87d9501875606af49646cd84a6b0a2ad94fdd336d9805ed87cded6556397c926 not found: ID does not exist" Nov 30 07:18:34 crc kubenswrapper[4941]: I1130 07:18:34.040413 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1faf57c1-e7e4-4ffc-b825-e305253ee49f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:18:34 crc kubenswrapper[4941]: I1130 07:18:34.040440 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1faf57c1-e7e4-4ffc-b825-e305253ee49f-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:18:34 crc kubenswrapper[4941]: I1130 07:18:34.040454 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6twk7\" (UniqueName: \"kubernetes.io/projected/1faf57c1-e7e4-4ffc-b825-e305253ee49f-kube-api-access-6twk7\") on node \"crc\" DevicePath \"\"" Nov 30 07:18:34 crc kubenswrapper[4941]: I1130 07:18:34.244983 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t4pgn"] Nov 30 07:18:34 crc kubenswrapper[4941]: I1130 07:18:34.253157 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t4pgn"] Nov 30 07:18:35 crc kubenswrapper[4941]: I1130 07:18:35.534730 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1faf57c1-e7e4-4ffc-b825-e305253ee49f" path="/var/lib/kubelet/pods/1faf57c1-e7e4-4ffc-b825-e305253ee49f/volumes" Nov 30 07:18:40 crc kubenswrapper[4941]: I1130 07:18:40.522232 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1" Nov 30 07:18:40 crc kubenswrapper[4941]: E1130 07:18:40.522725 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:18:52 crc kubenswrapper[4941]: I1130 07:18:52.521972 4941 scope.go:117] "RemoveContainer" 
containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1" Nov 30 07:18:52 crc kubenswrapper[4941]: E1130 07:18:52.522777 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:19:04 crc kubenswrapper[4941]: I1130 07:19:04.522079 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1" Nov 30 07:19:04 crc kubenswrapper[4941]: E1130 07:19:04.522981 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:19:19 crc kubenswrapper[4941]: I1130 07:19:19.522365 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1" Nov 30 07:19:19 crc kubenswrapper[4941]: E1130 07:19:19.523236 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:19:34 crc kubenswrapper[4941]: I1130 07:19:34.522216 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1" Nov 30 07:19:35 crc kubenswrapper[4941]: I1130 07:19:35.412449 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"1eb6c48ece68402030d125c28508740780ac7d91c500b5fc8b309103ac64767d"} Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.778504 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qzm6d"] Nov 30 07:20:11 crc kubenswrapper[4941]: E1130 07:20:11.779986 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1faf57c1-e7e4-4ffc-b825-e305253ee49f" containerName="extract-utilities" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.780013 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1faf57c1-e7e4-4ffc-b825-e305253ee49f" containerName="extract-utilities" Nov 30 07:20:11 crc kubenswrapper[4941]: E1130 07:20:11.780056 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1faf57c1-e7e4-4ffc-b825-e305253ee49f" containerName="extract-content" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.780070 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1faf57c1-e7e4-4ffc-b825-e305253ee49f" containerName="extract-content" Nov 30 07:20:11 crc kubenswrapper[4941]: E1130 07:20:11.780107 4941 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1faf57c1-e7e4-4ffc-b825-e305253ee49f" containerName="registry-server" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.780120 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1faf57c1-e7e4-4ffc-b825-e305253ee49f" containerName="registry-server" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.780455 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1faf57c1-e7e4-4ffc-b825-e305253ee49f" containerName="registry-server" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.814717 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qzm6d"] Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.814911 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.886634 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db80b24d-b085-4b6f-b06a-571a17774a5c-catalog-content\") pod \"redhat-operators-qzm6d\" (UID: \"db80b24d-b085-4b6f-b06a-571a17774a5c\") " pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.886783 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db80b24d-b085-4b6f-b06a-571a17774a5c-utilities\") pod \"redhat-operators-qzm6d\" (UID: \"db80b24d-b085-4b6f-b06a-571a17774a5c\") " pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.887223 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsftd\" (UniqueName: \"kubernetes.io/projected/db80b24d-b085-4b6f-b06a-571a17774a5c-kube-api-access-nsftd\") pod \"redhat-operators-qzm6d\" (UID: \"db80b24d-b085-4b6f-b06a-571a17774a5c\") " pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.989399 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsftd\" (UniqueName: \"kubernetes.io/projected/db80b24d-b085-4b6f-b06a-571a17774a5c-kube-api-access-nsftd\") pod \"redhat-operators-qzm6d\" (UID: \"db80b24d-b085-4b6f-b06a-571a17774a5c\") " pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.989496 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db80b24d-b085-4b6f-b06a-571a17774a5c-catalog-content\") pod \"redhat-operators-qzm6d\" (UID: \"db80b24d-b085-4b6f-b06a-571a17774a5c\") " pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.989530 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db80b24d-b085-4b6f-b06a-571a17774a5c-utilities\") pod \"redhat-operators-qzm6d\" (UID: \"db80b24d-b085-4b6f-b06a-571a17774a5c\") " pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.990154 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db80b24d-b085-4b6f-b06a-571a17774a5c-utilities\") pod \"redhat-operators-qzm6d\" (UID: \"db80b24d-b085-4b6f-b06a-571a17774a5c\") 
" pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:11 crc kubenswrapper[4941]: I1130 07:20:11.990376 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db80b24d-b085-4b6f-b06a-571a17774a5c-catalog-content\") pod \"redhat-operators-qzm6d\" (UID: \"db80b24d-b085-4b6f-b06a-571a17774a5c\") " pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:12 crc kubenswrapper[4941]: I1130 07:20:12.014810 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsftd\" (UniqueName: \"kubernetes.io/projected/db80b24d-b085-4b6f-b06a-571a17774a5c-kube-api-access-nsftd\") pod \"redhat-operators-qzm6d\" (UID: \"db80b24d-b085-4b6f-b06a-571a17774a5c\") " pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:12 crc kubenswrapper[4941]: I1130 07:20:12.145801 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:12 crc kubenswrapper[4941]: I1130 07:20:12.617260 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qzm6d"] Nov 30 07:20:12 crc kubenswrapper[4941]: I1130 07:20:12.774202 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzm6d" event={"ID":"db80b24d-b085-4b6f-b06a-571a17774a5c","Type":"ContainerStarted","Data":"623aec73d7335f52b87101a2adbe7782a2cdf73d8afd79e1095d26d3d8c8b152"} Nov 30 07:20:13 crc kubenswrapper[4941]: I1130 07:20:13.792706 4941 generic.go:334] "Generic (PLEG): container finished" podID="db80b24d-b085-4b6f-b06a-571a17774a5c" containerID="90954aca1553de7e74afe9cf4b75a28ca9f5a6fd13e723e71b4b1c568451e4a2" exitCode=0 Nov 30 07:20:13 crc kubenswrapper[4941]: I1130 07:20:13.792807 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzm6d" event={"ID":"db80b24d-b085-4b6f-b06a-571a17774a5c","Type":"ContainerDied","Data":"90954aca1553de7e74afe9cf4b75a28ca9f5a6fd13e723e71b4b1c568451e4a2"} Nov 30 07:20:15 crc kubenswrapper[4941]: I1130 07:20:15.821755 4941 generic.go:334] "Generic (PLEG): container finished" podID="db80b24d-b085-4b6f-b06a-571a17774a5c" containerID="c2dca3a8197bbbafcb1aedfd051a225da3e116c3d3ff4148dc065ea6d1fd5bf4" exitCode=0 Nov 30 07:20:15 crc kubenswrapper[4941]: I1130 07:20:15.821839 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzm6d" event={"ID":"db80b24d-b085-4b6f-b06a-571a17774a5c","Type":"ContainerDied","Data":"c2dca3a8197bbbafcb1aedfd051a225da3e116c3d3ff4148dc065ea6d1fd5bf4"} Nov 30 07:20:16 crc kubenswrapper[4941]: I1130 07:20:16.836280 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzm6d" event={"ID":"db80b24d-b085-4b6f-b06a-571a17774a5c","Type":"ContainerStarted","Data":"50a1cd7bdb39eb6b3fd7c53760edc1cca067982315ea2cce9f6e741fb368c7ac"} Nov 30 07:20:16 crc kubenswrapper[4941]: I1130 07:20:16.865170 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qzm6d" podStartSLOduration=3.442884042 podStartE2EDuration="5.865146325s" podCreationTimestamp="2025-11-30 07:20:11 +0000 UTC" firstStartedPulling="2025-11-30 07:20:13.795944126 +0000 UTC m=+2034.564115775" lastFinishedPulling="2025-11-30 07:20:16.218206449 +0000 UTC m=+2036.986378058" observedRunningTime="2025-11-30 07:20:16.864545566 +0000 UTC m=+2037.632717205" 
watchObservedRunningTime="2025-11-30 07:20:16.865146325 +0000 UTC m=+2037.633317934" Nov 30 07:20:22 crc kubenswrapper[4941]: I1130 07:20:22.146440 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:22 crc kubenswrapper[4941]: I1130 07:20:22.147018 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:23 crc kubenswrapper[4941]: I1130 07:20:23.188886 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qzm6d" podUID="db80b24d-b085-4b6f-b06a-571a17774a5c" containerName="registry-server" probeResult="failure" output=< Nov 30 07:20:23 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s Nov 30 07:20:23 crc kubenswrapper[4941]: > Nov 30 07:20:28 crc kubenswrapper[4941]: I1130 07:20:28.832343 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jfdh5"] Nov 30 07:20:28 crc kubenswrapper[4941]: I1130 07:20:28.835148 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:28 crc kubenswrapper[4941]: I1130 07:20:28.857563 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jfdh5"] Nov 30 07:20:28 crc kubenswrapper[4941]: I1130 07:20:28.924999 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b91c2050-2468-45da-9acf-ed02d813a060-catalog-content\") pod \"redhat-marketplace-jfdh5\" (UID: \"b91c2050-2468-45da-9acf-ed02d813a060\") " pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:28 crc kubenswrapper[4941]: I1130 07:20:28.925137 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b91c2050-2468-45da-9acf-ed02d813a060-utilities\") pod \"redhat-marketplace-jfdh5\" (UID: \"b91c2050-2468-45da-9acf-ed02d813a060\") " pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:28 crc kubenswrapper[4941]: I1130 07:20:28.925404 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vl24c\" (UniqueName: \"kubernetes.io/projected/b91c2050-2468-45da-9acf-ed02d813a060-kube-api-access-vl24c\") pod \"redhat-marketplace-jfdh5\" (UID: \"b91c2050-2468-45da-9acf-ed02d813a060\") " pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:29 crc kubenswrapper[4941]: I1130 07:20:29.026915 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b91c2050-2468-45da-9acf-ed02d813a060-catalog-content\") pod \"redhat-marketplace-jfdh5\" (UID: \"b91c2050-2468-45da-9acf-ed02d813a060\") " pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:29 crc kubenswrapper[4941]: I1130 07:20:29.026998 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b91c2050-2468-45da-9acf-ed02d813a060-utilities\") pod \"redhat-marketplace-jfdh5\" (UID: \"b91c2050-2468-45da-9acf-ed02d813a060\") " pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:29 crc kubenswrapper[4941]: I1130 07:20:29.027069 4941 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-vl24c\" (UniqueName: \"kubernetes.io/projected/b91c2050-2468-45da-9acf-ed02d813a060-kube-api-access-vl24c\") pod \"redhat-marketplace-jfdh5\" (UID: \"b91c2050-2468-45da-9acf-ed02d813a060\") " pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:29 crc kubenswrapper[4941]: I1130 07:20:29.027558 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b91c2050-2468-45da-9acf-ed02d813a060-catalog-content\") pod \"redhat-marketplace-jfdh5\" (UID: \"b91c2050-2468-45da-9acf-ed02d813a060\") " pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:29 crc kubenswrapper[4941]: I1130 07:20:29.027649 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b91c2050-2468-45da-9acf-ed02d813a060-utilities\") pod \"redhat-marketplace-jfdh5\" (UID: \"b91c2050-2468-45da-9acf-ed02d813a060\") " pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:29 crc kubenswrapper[4941]: I1130 07:20:29.051360 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vl24c\" (UniqueName: \"kubernetes.io/projected/b91c2050-2468-45da-9acf-ed02d813a060-kube-api-access-vl24c\") pod \"redhat-marketplace-jfdh5\" (UID: \"b91c2050-2468-45da-9acf-ed02d813a060\") " pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:29 crc kubenswrapper[4941]: I1130 07:20:29.168462 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:29 crc kubenswrapper[4941]: I1130 07:20:29.658891 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jfdh5"] Nov 30 07:20:29 crc kubenswrapper[4941]: W1130 07:20:29.662815 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb91c2050_2468_45da_9acf_ed02d813a060.slice/crio-6a8af0928571337db6661acb2d982e8d0cc50007b912adec4876c45e940cd605 WatchSource:0}: Error finding container 6a8af0928571337db6661acb2d982e8d0cc50007b912adec4876c45e940cd605: Status 404 returned error can't find the container with id 6a8af0928571337db6661acb2d982e8d0cc50007b912adec4876c45e940cd605 Nov 30 07:20:29 crc kubenswrapper[4941]: I1130 07:20:29.965008 4941 generic.go:334] "Generic (PLEG): container finished" podID="b91c2050-2468-45da-9acf-ed02d813a060" containerID="e28ef9cc684a9c5c277edec33f4b2b1cde73e21d80d3b11e3739bd7c33502b2e" exitCode=0 Nov 30 07:20:29 crc kubenswrapper[4941]: I1130 07:20:29.965087 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jfdh5" event={"ID":"b91c2050-2468-45da-9acf-ed02d813a060","Type":"ContainerDied","Data":"e28ef9cc684a9c5c277edec33f4b2b1cde73e21d80d3b11e3739bd7c33502b2e"} Nov 30 07:20:29 crc kubenswrapper[4941]: I1130 07:20:29.965509 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jfdh5" event={"ID":"b91c2050-2468-45da-9acf-ed02d813a060","Type":"ContainerStarted","Data":"6a8af0928571337db6661acb2d982e8d0cc50007b912adec4876c45e940cd605"} Nov 30 07:20:31 crc kubenswrapper[4941]: I1130 07:20:31.993063 4941 generic.go:334] "Generic (PLEG): container finished" podID="b91c2050-2468-45da-9acf-ed02d813a060" containerID="83b5d5e782e6d7b9f4de4aa8c6369bf5931a9ae8b1088ecd86596949c26d7d0e" exitCode=0 Nov 30 07:20:31 crc kubenswrapper[4941]: I1130 
07:20:31.993242 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jfdh5" event={"ID":"b91c2050-2468-45da-9acf-ed02d813a060","Type":"ContainerDied","Data":"83b5d5e782e6d7b9f4de4aa8c6369bf5931a9ae8b1088ecd86596949c26d7d0e"} Nov 30 07:20:32 crc kubenswrapper[4941]: I1130 07:20:32.223774 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:32 crc kubenswrapper[4941]: I1130 07:20:32.300041 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:33 crc kubenswrapper[4941]: I1130 07:20:33.004287 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jfdh5" event={"ID":"b91c2050-2468-45da-9acf-ed02d813a060","Type":"ContainerStarted","Data":"5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3"} Nov 30 07:20:33 crc kubenswrapper[4941]: I1130 07:20:33.024671 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jfdh5" podStartSLOduration=2.437652684 podStartE2EDuration="5.024650313s" podCreationTimestamp="2025-11-30 07:20:28 +0000 UTC" firstStartedPulling="2025-11-30 07:20:29.967035211 +0000 UTC m=+2050.735206830" lastFinishedPulling="2025-11-30 07:20:32.55403281 +0000 UTC m=+2053.322204459" observedRunningTime="2025-11-30 07:20:33.022059213 +0000 UTC m=+2053.790230832" watchObservedRunningTime="2025-11-30 07:20:33.024650313 +0000 UTC m=+2053.792821922" Nov 30 07:20:34 crc kubenswrapper[4941]: I1130 07:20:34.598314 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qzm6d"] Nov 30 07:20:34 crc kubenswrapper[4941]: I1130 07:20:34.599314 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qzm6d" podUID="db80b24d-b085-4b6f-b06a-571a17774a5c" containerName="registry-server" containerID="cri-o://50a1cd7bdb39eb6b3fd7c53760edc1cca067982315ea2cce9f6e741fb368c7ac" gracePeriod=2 Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.021485 4941 generic.go:334] "Generic (PLEG): container finished" podID="db80b24d-b085-4b6f-b06a-571a17774a5c" containerID="50a1cd7bdb39eb6b3fd7c53760edc1cca067982315ea2cce9f6e741fb368c7ac" exitCode=0 Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.021581 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzm6d" event={"ID":"db80b24d-b085-4b6f-b06a-571a17774a5c","Type":"ContainerDied","Data":"50a1cd7bdb39eb6b3fd7c53760edc1cca067982315ea2cce9f6e741fb368c7ac"} Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.021831 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzm6d" event={"ID":"db80b24d-b085-4b6f-b06a-571a17774a5c","Type":"ContainerDied","Data":"623aec73d7335f52b87101a2adbe7782a2cdf73d8afd79e1095d26d3d8c8b152"} Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.021878 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="623aec73d7335f52b87101a2adbe7782a2cdf73d8afd79e1095d26d3d8c8b152" Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.037935 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.039459 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db80b24d-b085-4b6f-b06a-571a17774a5c-catalog-content\") pod \"db80b24d-b085-4b6f-b06a-571a17774a5c\" (UID: \"db80b24d-b085-4b6f-b06a-571a17774a5c\") " Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.039492 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db80b24d-b085-4b6f-b06a-571a17774a5c-utilities\") pod \"db80b24d-b085-4b6f-b06a-571a17774a5c\" (UID: \"db80b24d-b085-4b6f-b06a-571a17774a5c\") " Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.039603 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsftd\" (UniqueName: \"kubernetes.io/projected/db80b24d-b085-4b6f-b06a-571a17774a5c-kube-api-access-nsftd\") pod \"db80b24d-b085-4b6f-b06a-571a17774a5c\" (UID: \"db80b24d-b085-4b6f-b06a-571a17774a5c\") " Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.040488 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db80b24d-b085-4b6f-b06a-571a17774a5c-utilities" (OuterVolumeSpecName: "utilities") pod "db80b24d-b085-4b6f-b06a-571a17774a5c" (UID: "db80b24d-b085-4b6f-b06a-571a17774a5c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.048025 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db80b24d-b085-4b6f-b06a-571a17774a5c-kube-api-access-nsftd" (OuterVolumeSpecName: "kube-api-access-nsftd") pod "db80b24d-b085-4b6f-b06a-571a17774a5c" (UID: "db80b24d-b085-4b6f-b06a-571a17774a5c"). InnerVolumeSpecName "kube-api-access-nsftd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.140294 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsftd\" (UniqueName: \"kubernetes.io/projected/db80b24d-b085-4b6f-b06a-571a17774a5c-kube-api-access-nsftd\") on node \"crc\" DevicePath \"\"" Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.140343 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db80b24d-b085-4b6f-b06a-571a17774a5c-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.164032 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db80b24d-b085-4b6f-b06a-571a17774a5c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "db80b24d-b085-4b6f-b06a-571a17774a5c" (UID: "db80b24d-b085-4b6f-b06a-571a17774a5c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:20:35 crc kubenswrapper[4941]: I1130 07:20:35.241295 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db80b24d-b085-4b6f-b06a-571a17774a5c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:20:36 crc kubenswrapper[4941]: I1130 07:20:36.031452 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qzm6d" Nov 30 07:20:36 crc kubenswrapper[4941]: I1130 07:20:36.071248 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qzm6d"] Nov 30 07:20:36 crc kubenswrapper[4941]: I1130 07:20:36.081504 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qzm6d"] Nov 30 07:20:37 crc kubenswrapper[4941]: I1130 07:20:37.542900 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db80b24d-b085-4b6f-b06a-571a17774a5c" path="/var/lib/kubelet/pods/db80b24d-b085-4b6f-b06a-571a17774a5c/volumes" Nov 30 07:20:39 crc kubenswrapper[4941]: I1130 07:20:39.169767 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:39 crc kubenswrapper[4941]: I1130 07:20:39.170368 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:39 crc kubenswrapper[4941]: I1130 07:20:39.261889 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:40 crc kubenswrapper[4941]: I1130 07:20:40.142313 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:40 crc kubenswrapper[4941]: I1130 07:20:40.199266 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jfdh5"] Nov 30 07:20:42 crc kubenswrapper[4941]: I1130 07:20:42.090231 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jfdh5" podUID="b91c2050-2468-45da-9acf-ed02d813a060" containerName="registry-server" containerID="cri-o://5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3" gracePeriod=2 Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.009421 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.104849 4941 generic.go:334] "Generic (PLEG): container finished" podID="b91c2050-2468-45da-9acf-ed02d813a060" containerID="5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3" exitCode=0 Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.104950 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jfdh5" event={"ID":"b91c2050-2468-45da-9acf-ed02d813a060","Type":"ContainerDied","Data":"5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3"} Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.105008 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jfdh5" event={"ID":"b91c2050-2468-45da-9acf-ed02d813a060","Type":"ContainerDied","Data":"6a8af0928571337db6661acb2d982e8d0cc50007b912adec4876c45e940cd605"} Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.105048 4941 scope.go:117] "RemoveContainer" containerID="5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3" Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.105305 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jfdh5" Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.145185 4941 scope.go:117] "RemoveContainer" containerID="83b5d5e782e6d7b9f4de4aa8c6369bf5931a9ae8b1088ecd86596949c26d7d0e" Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.167851 4941 scope.go:117] "RemoveContainer" containerID="e28ef9cc684a9c5c277edec33f4b2b1cde73e21d80d3b11e3739bd7c33502b2e" Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.191013 4941 scope.go:117] "RemoveContainer" containerID="5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3" Nov 30 07:20:43 crc kubenswrapper[4941]: E1130 07:20:43.192094 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3\": container with ID starting with 5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3 not found: ID does not exist" containerID="5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3" Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.192160 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3"} err="failed to get container status \"5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3\": rpc error: code = NotFound desc = could not find container \"5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3\": container with ID starting with 5bd449cdd9b20bd24ac14dc1e5cc66e51aea8b345855334aa8c8042fc5fcbbc3 not found: ID does not exist" Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.192206 4941 scope.go:117] "RemoveContainer" containerID="83b5d5e782e6d7b9f4de4aa8c6369bf5931a9ae8b1088ecd86596949c26d7d0e" Nov 30 07:20:43 crc kubenswrapper[4941]: E1130 07:20:43.192709 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83b5d5e782e6d7b9f4de4aa8c6369bf5931a9ae8b1088ecd86596949c26d7d0e\": container with ID starting with 83b5d5e782e6d7b9f4de4aa8c6369bf5931a9ae8b1088ecd86596949c26d7d0e not found: ID does not exist" containerID="83b5d5e782e6d7b9f4de4aa8c6369bf5931a9ae8b1088ecd86596949c26d7d0e" Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.192777 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83b5d5e782e6d7b9f4de4aa8c6369bf5931a9ae8b1088ecd86596949c26d7d0e"} err="failed to get container status \"83b5d5e782e6d7b9f4de4aa8c6369bf5931a9ae8b1088ecd86596949c26d7d0e\": rpc error: code = NotFound desc = could not find container \"83b5d5e782e6d7b9f4de4aa8c6369bf5931a9ae8b1088ecd86596949c26d7d0e\": container with ID starting with 83b5d5e782e6d7b9f4de4aa8c6369bf5931a9ae8b1088ecd86596949c26d7d0e not found: ID does not exist" Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.192825 4941 scope.go:117] "RemoveContainer" containerID="e28ef9cc684a9c5c277edec33f4b2b1cde73e21d80d3b11e3739bd7c33502b2e" Nov 30 07:20:43 crc kubenswrapper[4941]: E1130 07:20:43.193216 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e28ef9cc684a9c5c277edec33f4b2b1cde73e21d80d3b11e3739bd7c33502b2e\": container with ID starting with e28ef9cc684a9c5c277edec33f4b2b1cde73e21d80d3b11e3739bd7c33502b2e not found: ID does not exist" containerID="e28ef9cc684a9c5c277edec33f4b2b1cde73e21d80d3b11e3739bd7c33502b2e" 
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.193258 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e28ef9cc684a9c5c277edec33f4b2b1cde73e21d80d3b11e3739bd7c33502b2e"} err="failed to get container status \"e28ef9cc684a9c5c277edec33f4b2b1cde73e21d80d3b11e3739bd7c33502b2e\": rpc error: code = NotFound desc = could not find container \"e28ef9cc684a9c5c277edec33f4b2b1cde73e21d80d3b11e3739bd7c33502b2e\": container with ID starting with e28ef9cc684a9c5c277edec33f4b2b1cde73e21d80d3b11e3739bd7c33502b2e not found: ID does not exist"
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.196015 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b91c2050-2468-45da-9acf-ed02d813a060-utilities\") pod \"b91c2050-2468-45da-9acf-ed02d813a060\" (UID: \"b91c2050-2468-45da-9acf-ed02d813a060\") "
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.196084 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b91c2050-2468-45da-9acf-ed02d813a060-catalog-content\") pod \"b91c2050-2468-45da-9acf-ed02d813a060\" (UID: \"b91c2050-2468-45da-9acf-ed02d813a060\") "
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.196258 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vl24c\" (UniqueName: \"kubernetes.io/projected/b91c2050-2468-45da-9acf-ed02d813a060-kube-api-access-vl24c\") pod \"b91c2050-2468-45da-9acf-ed02d813a060\" (UID: \"b91c2050-2468-45da-9acf-ed02d813a060\") "
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.197618 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b91c2050-2468-45da-9acf-ed02d813a060-utilities" (OuterVolumeSpecName: "utilities") pod "b91c2050-2468-45da-9acf-ed02d813a060" (UID: "b91c2050-2468-45da-9acf-ed02d813a060"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.202648 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b91c2050-2468-45da-9acf-ed02d813a060-kube-api-access-vl24c" (OuterVolumeSpecName: "kube-api-access-vl24c") pod "b91c2050-2468-45da-9acf-ed02d813a060" (UID: "b91c2050-2468-45da-9acf-ed02d813a060"). InnerVolumeSpecName "kube-api-access-vl24c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.236123 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b91c2050-2468-45da-9acf-ed02d813a060-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b91c2050-2468-45da-9acf-ed02d813a060" (UID: "b91c2050-2468-45da-9acf-ed02d813a060"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.298558 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vl24c\" (UniqueName: \"kubernetes.io/projected/b91c2050-2468-45da-9acf-ed02d813a060-kube-api-access-vl24c\") on node \"crc\" DevicePath \"\""
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.298613 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b91c2050-2468-45da-9acf-ed02d813a060-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.298627 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b91c2050-2468-45da-9acf-ed02d813a060-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.450239 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jfdh5"]
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.462386 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jfdh5"]
Nov 30 07:20:43 crc kubenswrapper[4941]: I1130 07:20:43.530829 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b91c2050-2468-45da-9acf-ed02d813a060" path="/var/lib/kubelet/pods/b91c2050-2468-45da-9acf-ed02d813a060/volumes"
Nov 30 07:22:02 crc kubenswrapper[4941]: I1130 07:22:02.978734 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 07:22:02 crc kubenswrapper[4941]: I1130 07:22:02.979292 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.116667 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ltgvc"]
Nov 30 07:22:25 crc kubenswrapper[4941]: E1130 07:22:25.118201 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b91c2050-2468-45da-9acf-ed02d813a060" containerName="extract-content"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.118230 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b91c2050-2468-45da-9acf-ed02d813a060" containerName="extract-content"
Nov 30 07:22:25 crc kubenswrapper[4941]: E1130 07:22:25.118258 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db80b24d-b085-4b6f-b06a-571a17774a5c" containerName="registry-server"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.118276 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="db80b24d-b085-4b6f-b06a-571a17774a5c" containerName="registry-server"
Nov 30 07:22:25 crc kubenswrapper[4941]: E1130 07:22:25.118308 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b91c2050-2468-45da-9acf-ed02d813a060" containerName="extract-utilities"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.118341 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b91c2050-2468-45da-9acf-ed02d813a060" containerName="extract-utilities"
Nov 30 07:22:25 crc kubenswrapper[4941]: E1130 07:22:25.118490 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db80b24d-b085-4b6f-b06a-571a17774a5c" containerName="extract-content"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.118507 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="db80b24d-b085-4b6f-b06a-571a17774a5c" containerName="extract-content"
Nov 30 07:22:25 crc kubenswrapper[4941]: E1130 07:22:25.118566 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db80b24d-b085-4b6f-b06a-571a17774a5c" containerName="extract-utilities"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.118582 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="db80b24d-b085-4b6f-b06a-571a17774a5c" containerName="extract-utilities"
Nov 30 07:22:25 crc kubenswrapper[4941]: E1130 07:22:25.118613 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b91c2050-2468-45da-9acf-ed02d813a060" containerName="registry-server"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.118627 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b91c2050-2468-45da-9acf-ed02d813a060" containerName="registry-server"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.118932 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="db80b24d-b085-4b6f-b06a-571a17774a5c" containerName="registry-server"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.118976 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="b91c2050-2468-45da-9acf-ed02d813a060" containerName="registry-server"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.121825 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ltgvc"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.139718 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ltgvc"]
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.236008 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fcd22e9-e32f-42f9-8306-102adbed087e-catalog-content\") pod \"community-operators-ltgvc\" (UID: \"2fcd22e9-e32f-42f9-8306-102adbed087e\") " pod="openshift-marketplace/community-operators-ltgvc"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.236627 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fcd22e9-e32f-42f9-8306-102adbed087e-utilities\") pod \"community-operators-ltgvc\" (UID: \"2fcd22e9-e32f-42f9-8306-102adbed087e\") " pod="openshift-marketplace/community-operators-ltgvc"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.236690 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66rg2\" (UniqueName: \"kubernetes.io/projected/2fcd22e9-e32f-42f9-8306-102adbed087e-kube-api-access-66rg2\") pod \"community-operators-ltgvc\" (UID: \"2fcd22e9-e32f-42f9-8306-102adbed087e\") " pod="openshift-marketplace/community-operators-ltgvc"
Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.337810 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fcd22e9-e32f-42f9-8306-102adbed087e-utilities\") pod \"community-operators-ltgvc\" (UID:
\"2fcd22e9-e32f-42f9-8306-102adbed087e\") " pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.337918 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66rg2\" (UniqueName: \"kubernetes.io/projected/2fcd22e9-e32f-42f9-8306-102adbed087e-kube-api-access-66rg2\") pod \"community-operators-ltgvc\" (UID: \"2fcd22e9-e32f-42f9-8306-102adbed087e\") " pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.337969 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fcd22e9-e32f-42f9-8306-102adbed087e-catalog-content\") pod \"community-operators-ltgvc\" (UID: \"2fcd22e9-e32f-42f9-8306-102adbed087e\") " pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.338648 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fcd22e9-e32f-42f9-8306-102adbed087e-utilities\") pod \"community-operators-ltgvc\" (UID: \"2fcd22e9-e32f-42f9-8306-102adbed087e\") " pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.338756 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fcd22e9-e32f-42f9-8306-102adbed087e-catalog-content\") pod \"community-operators-ltgvc\" (UID: \"2fcd22e9-e32f-42f9-8306-102adbed087e\") " pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.358995 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66rg2\" (UniqueName: \"kubernetes.io/projected/2fcd22e9-e32f-42f9-8306-102adbed087e-kube-api-access-66rg2\") pod \"community-operators-ltgvc\" (UID: \"2fcd22e9-e32f-42f9-8306-102adbed087e\") " pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.452018 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:25 crc kubenswrapper[4941]: I1130 07:22:25.916056 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ltgvc"] Nov 30 07:22:26 crc kubenswrapper[4941]: I1130 07:22:26.011891 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltgvc" event={"ID":"2fcd22e9-e32f-42f9-8306-102adbed087e","Type":"ContainerStarted","Data":"274dcbb347da55ad22d5856033de8433cf803d6670b3264a29d0423400ecabb1"} Nov 30 07:22:27 crc kubenswrapper[4941]: I1130 07:22:27.022066 4941 generic.go:334] "Generic (PLEG): container finished" podID="2fcd22e9-e32f-42f9-8306-102adbed087e" containerID="40a616bdb7c36f5060964a7292867c66955a25938a9ba13396bd329a1a015072" exitCode=0 Nov 30 07:22:27 crc kubenswrapper[4941]: I1130 07:22:27.022118 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltgvc" event={"ID":"2fcd22e9-e32f-42f9-8306-102adbed087e","Type":"ContainerDied","Data":"40a616bdb7c36f5060964a7292867c66955a25938a9ba13396bd329a1a015072"} Nov 30 07:22:28 crc kubenswrapper[4941]: I1130 07:22:28.034641 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltgvc" event={"ID":"2fcd22e9-e32f-42f9-8306-102adbed087e","Type":"ContainerStarted","Data":"96a43ede47a9a2933088f1247c021ca93724d7c99a5b150e0bb0bfabe95c5cc1"} Nov 30 07:22:29 crc kubenswrapper[4941]: I1130 07:22:29.046496 4941 generic.go:334] "Generic (PLEG): container finished" podID="2fcd22e9-e32f-42f9-8306-102adbed087e" containerID="96a43ede47a9a2933088f1247c021ca93724d7c99a5b150e0bb0bfabe95c5cc1" exitCode=0 Nov 30 07:22:29 crc kubenswrapper[4941]: I1130 07:22:29.046541 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltgvc" event={"ID":"2fcd22e9-e32f-42f9-8306-102adbed087e","Type":"ContainerDied","Data":"96a43ede47a9a2933088f1247c021ca93724d7c99a5b150e0bb0bfabe95c5cc1"} Nov 30 07:22:30 crc kubenswrapper[4941]: I1130 07:22:30.067828 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltgvc" event={"ID":"2fcd22e9-e32f-42f9-8306-102adbed087e","Type":"ContainerStarted","Data":"3400ab62fc6864e1fdb19e306e2fb64d1ced95f94169e1fe054caa570d42a88d"} Nov 30 07:22:30 crc kubenswrapper[4941]: I1130 07:22:30.110055 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ltgvc" podStartSLOduration=2.614147786 podStartE2EDuration="5.110035118s" podCreationTimestamp="2025-11-30 07:22:25 +0000 UTC" firstStartedPulling="2025-11-30 07:22:27.024820078 +0000 UTC m=+2167.792991687" lastFinishedPulling="2025-11-30 07:22:29.52070741 +0000 UTC m=+2170.288879019" observedRunningTime="2025-11-30 07:22:30.099834605 +0000 UTC m=+2170.868006214" watchObservedRunningTime="2025-11-30 07:22:30.110035118 +0000 UTC m=+2170.878206727" Nov 30 07:22:32 crc kubenswrapper[4941]: I1130 07:22:32.978651 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:22:32 crc kubenswrapper[4941]: I1130 07:22:32.979190 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" 
podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:22:35 crc kubenswrapper[4941]: I1130 07:22:35.452724 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:35 crc kubenswrapper[4941]: I1130 07:22:35.453068 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:35 crc kubenswrapper[4941]: I1130 07:22:35.545085 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:36 crc kubenswrapper[4941]: I1130 07:22:36.205037 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:36 crc kubenswrapper[4941]: I1130 07:22:36.285062 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ltgvc"] Nov 30 07:22:38 crc kubenswrapper[4941]: I1130 07:22:38.148501 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ltgvc" podUID="2fcd22e9-e32f-42f9-8306-102adbed087e" containerName="registry-server" containerID="cri-o://3400ab62fc6864e1fdb19e306e2fb64d1ced95f94169e1fe054caa570d42a88d" gracePeriod=2 Nov 30 07:22:39 crc kubenswrapper[4941]: I1130 07:22:39.161018 4941 generic.go:334] "Generic (PLEG): container finished" podID="2fcd22e9-e32f-42f9-8306-102adbed087e" containerID="3400ab62fc6864e1fdb19e306e2fb64d1ced95f94169e1fe054caa570d42a88d" exitCode=0 Nov 30 07:22:39 crc kubenswrapper[4941]: I1130 07:22:39.161076 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltgvc" event={"ID":"2fcd22e9-e32f-42f9-8306-102adbed087e","Type":"ContainerDied","Data":"3400ab62fc6864e1fdb19e306e2fb64d1ced95f94169e1fe054caa570d42a88d"} Nov 30 07:22:39 crc kubenswrapper[4941]: I1130 07:22:39.293135 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:39 crc kubenswrapper[4941]: I1130 07:22:39.319752 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fcd22e9-e32f-42f9-8306-102adbed087e-catalog-content\") pod \"2fcd22e9-e32f-42f9-8306-102adbed087e\" (UID: \"2fcd22e9-e32f-42f9-8306-102adbed087e\") " Nov 30 07:22:39 crc kubenswrapper[4941]: I1130 07:22:39.319874 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66rg2\" (UniqueName: \"kubernetes.io/projected/2fcd22e9-e32f-42f9-8306-102adbed087e-kube-api-access-66rg2\") pod \"2fcd22e9-e32f-42f9-8306-102adbed087e\" (UID: \"2fcd22e9-e32f-42f9-8306-102adbed087e\") " Nov 30 07:22:39 crc kubenswrapper[4941]: I1130 07:22:39.319926 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fcd22e9-e32f-42f9-8306-102adbed087e-utilities\") pod \"2fcd22e9-e32f-42f9-8306-102adbed087e\" (UID: \"2fcd22e9-e32f-42f9-8306-102adbed087e\") " Nov 30 07:22:39 crc kubenswrapper[4941]: I1130 07:22:39.322800 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fcd22e9-e32f-42f9-8306-102adbed087e-utilities" (OuterVolumeSpecName: "utilities") pod "2fcd22e9-e32f-42f9-8306-102adbed087e" (UID: "2fcd22e9-e32f-42f9-8306-102adbed087e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:22:39 crc kubenswrapper[4941]: I1130 07:22:39.336609 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fcd22e9-e32f-42f9-8306-102adbed087e-kube-api-access-66rg2" (OuterVolumeSpecName: "kube-api-access-66rg2") pod "2fcd22e9-e32f-42f9-8306-102adbed087e" (UID: "2fcd22e9-e32f-42f9-8306-102adbed087e"). InnerVolumeSpecName "kube-api-access-66rg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:22:39 crc kubenswrapper[4941]: I1130 07:22:39.405078 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fcd22e9-e32f-42f9-8306-102adbed087e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2fcd22e9-e32f-42f9-8306-102adbed087e" (UID: "2fcd22e9-e32f-42f9-8306-102adbed087e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:22:39 crc kubenswrapper[4941]: I1130 07:22:39.422658 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66rg2\" (UniqueName: \"kubernetes.io/projected/2fcd22e9-e32f-42f9-8306-102adbed087e-kube-api-access-66rg2\") on node \"crc\" DevicePath \"\"" Nov 30 07:22:39 crc kubenswrapper[4941]: I1130 07:22:39.422689 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fcd22e9-e32f-42f9-8306-102adbed087e-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:22:39 crc kubenswrapper[4941]: I1130 07:22:39.422703 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fcd22e9-e32f-42f9-8306-102adbed087e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:22:40 crc kubenswrapper[4941]: I1130 07:22:40.175984 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ltgvc" event={"ID":"2fcd22e9-e32f-42f9-8306-102adbed087e","Type":"ContainerDied","Data":"274dcbb347da55ad22d5856033de8433cf803d6670b3264a29d0423400ecabb1"} Nov 30 07:22:40 crc kubenswrapper[4941]: I1130 07:22:40.176069 4941 scope.go:117] "RemoveContainer" containerID="3400ab62fc6864e1fdb19e306e2fb64d1ced95f94169e1fe054caa570d42a88d" Nov 30 07:22:40 crc kubenswrapper[4941]: I1130 07:22:40.176130 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ltgvc" Nov 30 07:22:40 crc kubenswrapper[4941]: I1130 07:22:40.208839 4941 scope.go:117] "RemoveContainer" containerID="96a43ede47a9a2933088f1247c021ca93724d7c99a5b150e0bb0bfabe95c5cc1" Nov 30 07:22:40 crc kubenswrapper[4941]: I1130 07:22:40.236743 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ltgvc"] Nov 30 07:22:40 crc kubenswrapper[4941]: I1130 07:22:40.251708 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ltgvc"] Nov 30 07:22:40 crc kubenswrapper[4941]: I1130 07:22:40.252546 4941 scope.go:117] "RemoveContainer" containerID="40a616bdb7c36f5060964a7292867c66955a25938a9ba13396bd329a1a015072" Nov 30 07:22:41 crc kubenswrapper[4941]: I1130 07:22:41.536059 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fcd22e9-e32f-42f9-8306-102adbed087e" path="/var/lib/kubelet/pods/2fcd22e9-e32f-42f9-8306-102adbed087e/volumes" Nov 30 07:23:02 crc kubenswrapper[4941]: I1130 07:23:02.979002 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:23:02 crc kubenswrapper[4941]: I1130 07:23:02.979788 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:23:02 crc kubenswrapper[4941]: I1130 07:23:02.979862 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 07:23:02 crc kubenswrapper[4941]: I1130 07:23:02.980835 4941 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1eb6c48ece68402030d125c28508740780ac7d91c500b5fc8b309103ac64767d"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 07:23:02 crc kubenswrapper[4941]: I1130 07:23:02.980945 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://1eb6c48ece68402030d125c28508740780ac7d91c500b5fc8b309103ac64767d" gracePeriod=600 Nov 30 07:23:03 crc kubenswrapper[4941]: I1130 07:23:03.433591 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="1eb6c48ece68402030d125c28508740780ac7d91c500b5fc8b309103ac64767d" exitCode=0 Nov 30 07:23:03 crc kubenswrapper[4941]: I1130 07:23:03.433670 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"1eb6c48ece68402030d125c28508740780ac7d91c500b5fc8b309103ac64767d"} Nov 30 07:23:03 crc kubenswrapper[4941]: I1130 07:23:03.434456 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778"} Nov 30 07:23:03 crc kubenswrapper[4941]: I1130 07:23:03.434570 4941 scope.go:117] "RemoveContainer" containerID="12f389798ce1352a67072353925419e3d86e61cd5dd4dbd3792f372b20b441d1" Nov 30 07:25:32 crc kubenswrapper[4941]: I1130 07:25:32.978273 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:25:32 crc kubenswrapper[4941]: I1130 07:25:32.979436 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:26:02 crc kubenswrapper[4941]: I1130 07:26:02.978945 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:26:02 crc kubenswrapper[4941]: I1130 07:26:02.979949 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:26:32 crc kubenswrapper[4941]: I1130 07:26:32.979187 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:26:32 crc kubenswrapper[4941]: I1130 07:26:32.980388 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:26:32 crc kubenswrapper[4941]: I1130 07:26:32.980490 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 07:26:32 crc kubenswrapper[4941]: I1130 07:26:32.981630 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 07:26:32 crc kubenswrapper[4941]: I1130 07:26:32.981736 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" gracePeriod=600 Nov 30 07:26:33 crc kubenswrapper[4941]: E1130 07:26:33.117197 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:26:33 crc kubenswrapper[4941]: I1130 07:26:33.436006 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" exitCode=0 Nov 30 07:26:33 crc kubenswrapper[4941]: I1130 07:26:33.436147 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778"} Nov 30 07:26:33 crc kubenswrapper[4941]: I1130 07:26:33.436645 4941 scope.go:117] "RemoveContainer" containerID="1eb6c48ece68402030d125c28508740780ac7d91c500b5fc8b309103ac64767d" Nov 30 07:26:33 crc kubenswrapper[4941]: I1130 07:26:33.438083 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:26:33 crc kubenswrapper[4941]: E1130 07:26:33.438577 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" 
Nov 30 07:26:38 crc kubenswrapper[4941]: I1130 07:26:38.657265 4941 scope.go:117] "RemoveContainer" containerID="c2dca3a8197bbbafcb1aedfd051a225da3e116c3d3ff4148dc065ea6d1fd5bf4" Nov 30 07:26:38 crc kubenswrapper[4941]: I1130 07:26:38.696723 4941 scope.go:117] "RemoveContainer" containerID="50a1cd7bdb39eb6b3fd7c53760edc1cca067982315ea2cce9f6e741fb368c7ac" Nov 30 07:26:38 crc kubenswrapper[4941]: I1130 07:26:38.743010 4941 scope.go:117] "RemoveContainer" containerID="90954aca1553de7e74afe9cf4b75a28ca9f5a6fd13e723e71b4b1c568451e4a2" Nov 30 07:26:48 crc kubenswrapper[4941]: I1130 07:26:48.521483 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:26:48 crc kubenswrapper[4941]: E1130 07:26:48.522376 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:27:00 crc kubenswrapper[4941]: I1130 07:27:00.521710 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:27:00 crc kubenswrapper[4941]: E1130 07:27:00.522688 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:27:11 crc kubenswrapper[4941]: I1130 07:27:11.521613 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:27:11 crc kubenswrapper[4941]: E1130 07:27:11.522519 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:27:26 crc kubenswrapper[4941]: I1130 07:27:26.522837 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:27:26 crc kubenswrapper[4941]: E1130 07:27:26.524239 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:27:40 crc kubenswrapper[4941]: I1130 07:27:40.521500 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:27:40 crc kubenswrapper[4941]: E1130 07:27:40.522578 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:27:55 crc kubenswrapper[4941]: I1130 07:27:55.521926 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:27:55 crc kubenswrapper[4941]: E1130 07:27:55.522799 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:28:08 crc kubenswrapper[4941]: I1130 07:28:08.521113 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:28:08 crc kubenswrapper[4941]: E1130 07:28:08.522031 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:28:22 crc kubenswrapper[4941]: I1130 07:28:22.522778 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:28:22 crc kubenswrapper[4941]: E1130 07:28:22.524896 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:28:34 crc kubenswrapper[4941]: I1130 07:28:34.521562 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:28:34 crc kubenswrapper[4941]: E1130 07:28:34.522613 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:28:49 crc kubenswrapper[4941]: I1130 07:28:49.533981 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:28:49 crc kubenswrapper[4941]: E1130 07:28:49.535700 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.585599 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t27vk"] Nov 30 07:28:59 crc kubenswrapper[4941]: E1130 07:28:59.586369 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fcd22e9-e32f-42f9-8306-102adbed087e" containerName="registry-server" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.586381 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fcd22e9-e32f-42f9-8306-102adbed087e" containerName="registry-server" Nov 30 07:28:59 crc kubenswrapper[4941]: E1130 07:28:59.593396 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fcd22e9-e32f-42f9-8306-102adbed087e" containerName="extract-utilities" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.593420 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fcd22e9-e32f-42f9-8306-102adbed087e" containerName="extract-utilities" Nov 30 07:28:59 crc kubenswrapper[4941]: E1130 07:28:59.593434 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fcd22e9-e32f-42f9-8306-102adbed087e" containerName="extract-content" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.593443 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fcd22e9-e32f-42f9-8306-102adbed087e" containerName="extract-content" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.593648 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fcd22e9-e32f-42f9-8306-102adbed087e" containerName="registry-server" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.594613 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.614359 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t27vk"] Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.731313 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80cfeeff-c189-4980-9f77-08ec4b485669-utilities\") pod \"certified-operators-t27vk\" (UID: \"80cfeeff-c189-4980-9f77-08ec4b485669\") " pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.731384 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80cfeeff-c189-4980-9f77-08ec4b485669-catalog-content\") pod \"certified-operators-t27vk\" (UID: \"80cfeeff-c189-4980-9f77-08ec4b485669\") " pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.731412 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxbhd\" (UniqueName: \"kubernetes.io/projected/80cfeeff-c189-4980-9f77-08ec4b485669-kube-api-access-lxbhd\") pod \"certified-operators-t27vk\" (UID: \"80cfeeff-c189-4980-9f77-08ec4b485669\") " pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.833681 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80cfeeff-c189-4980-9f77-08ec4b485669-utilities\") pod \"certified-operators-t27vk\" (UID: \"80cfeeff-c189-4980-9f77-08ec4b485669\") " pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.833743 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80cfeeff-c189-4980-9f77-08ec4b485669-catalog-content\") pod \"certified-operators-t27vk\" (UID: \"80cfeeff-c189-4980-9f77-08ec4b485669\") " pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.833776 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxbhd\" (UniqueName: \"kubernetes.io/projected/80cfeeff-c189-4980-9f77-08ec4b485669-kube-api-access-lxbhd\") pod \"certified-operators-t27vk\" (UID: \"80cfeeff-c189-4980-9f77-08ec4b485669\") " pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.834353 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80cfeeff-c189-4980-9f77-08ec4b485669-utilities\") pod \"certified-operators-t27vk\" (UID: \"80cfeeff-c189-4980-9f77-08ec4b485669\") " pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.834441 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80cfeeff-c189-4980-9f77-08ec4b485669-catalog-content\") pod \"certified-operators-t27vk\" (UID: \"80cfeeff-c189-4980-9f77-08ec4b485669\") " pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.856167 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lxbhd\" (UniqueName: \"kubernetes.io/projected/80cfeeff-c189-4980-9f77-08ec4b485669-kube-api-access-lxbhd\") pod \"certified-operators-t27vk\" (UID: \"80cfeeff-c189-4980-9f77-08ec4b485669\") " pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:28:59 crc kubenswrapper[4941]: I1130 07:28:59.927099 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:29:00 crc kubenswrapper[4941]: I1130 07:29:00.208016 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t27vk"] Nov 30 07:29:00 crc kubenswrapper[4941]: I1130 07:29:00.780635 4941 generic.go:334] "Generic (PLEG): container finished" podID="80cfeeff-c189-4980-9f77-08ec4b485669" containerID="24cf3ffea0ad604e457b43ea6809d614ebe8cd33c9bf8f3214147964643192a1" exitCode=0 Nov 30 07:29:00 crc kubenswrapper[4941]: I1130 07:29:00.780717 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t27vk" event={"ID":"80cfeeff-c189-4980-9f77-08ec4b485669","Type":"ContainerDied","Data":"24cf3ffea0ad604e457b43ea6809d614ebe8cd33c9bf8f3214147964643192a1"} Nov 30 07:29:00 crc kubenswrapper[4941]: I1130 07:29:00.781021 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t27vk" event={"ID":"80cfeeff-c189-4980-9f77-08ec4b485669","Type":"ContainerStarted","Data":"23a92c7a4739e3ca00fd279efd8f8eca51c34d697ab10558e4556b3b2b10ed69"} Nov 30 07:29:00 crc kubenswrapper[4941]: I1130 07:29:00.783433 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 07:29:02 crc kubenswrapper[4941]: I1130 07:29:02.800798 4941 generic.go:334] "Generic (PLEG): container finished" podID="80cfeeff-c189-4980-9f77-08ec4b485669" containerID="174b59f5efe4dd299c5422c46fc3fc0ed0f12dc6218015f3bb3fa68448cfe790" exitCode=0 Nov 30 07:29:02 crc kubenswrapper[4941]: I1130 07:29:02.801197 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t27vk" event={"ID":"80cfeeff-c189-4980-9f77-08ec4b485669","Type":"ContainerDied","Data":"174b59f5efe4dd299c5422c46fc3fc0ed0f12dc6218015f3bb3fa68448cfe790"} Nov 30 07:29:03 crc kubenswrapper[4941]: I1130 07:29:03.522406 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:29:03 crc kubenswrapper[4941]: E1130 07:29:03.522706 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:29:03 crc kubenswrapper[4941]: I1130 07:29:03.813299 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t27vk" event={"ID":"80cfeeff-c189-4980-9f77-08ec4b485669","Type":"ContainerStarted","Data":"cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067"} Nov 30 07:29:03 crc kubenswrapper[4941]: I1130 07:29:03.848956 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t27vk" podStartSLOduration=2.290662991 
podStartE2EDuration="4.848903624s" podCreationTimestamp="2025-11-30 07:28:59 +0000 UTC" firstStartedPulling="2025-11-30 07:29:00.78304894 +0000 UTC m=+2561.551220549" lastFinishedPulling="2025-11-30 07:29:03.341289543 +0000 UTC m=+2564.109461182" observedRunningTime="2025-11-30 07:29:03.84196284 +0000 UTC m=+2564.610134449" watchObservedRunningTime="2025-11-30 07:29:03.848903624 +0000 UTC m=+2564.617075243" Nov 30 07:29:09 crc kubenswrapper[4941]: I1130 07:29:09.927551 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:29:09 crc kubenswrapper[4941]: I1130 07:29:09.927863 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:29:10 crc kubenswrapper[4941]: I1130 07:29:10.023349 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:29:10 crc kubenswrapper[4941]: I1130 07:29:10.918221 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:29:10 crc kubenswrapper[4941]: I1130 07:29:10.982505 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t27vk"] Nov 30 07:29:12 crc kubenswrapper[4941]: I1130 07:29:12.880835 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t27vk" podUID="80cfeeff-c189-4980-9f77-08ec4b485669" containerName="registry-server" containerID="cri-o://cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067" gracePeriod=2 Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.832423 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.890393 4941 generic.go:334] "Generic (PLEG): container finished" podID="80cfeeff-c189-4980-9f77-08ec4b485669" containerID="cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067" exitCode=0 Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.890448 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t27vk" event={"ID":"80cfeeff-c189-4980-9f77-08ec4b485669","Type":"ContainerDied","Data":"cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067"} Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.890485 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t27vk" Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.890509 4941 scope.go:117] "RemoveContainer" containerID="cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067" Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.890489 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t27vk" event={"ID":"80cfeeff-c189-4980-9f77-08ec4b485669","Type":"ContainerDied","Data":"23a92c7a4739e3ca00fd279efd8f8eca51c34d697ab10558e4556b3b2b10ed69"} Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.916096 4941 scope.go:117] "RemoveContainer" containerID="174b59f5efe4dd299c5422c46fc3fc0ed0f12dc6218015f3bb3fa68448cfe790" Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.934971 4941 scope.go:117] "RemoveContainer" containerID="24cf3ffea0ad604e457b43ea6809d614ebe8cd33c9bf8f3214147964643192a1" Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.973421 4941 scope.go:117] "RemoveContainer" containerID="cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067" Nov 30 07:29:13 crc kubenswrapper[4941]: E1130 07:29:13.973833 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067\": container with ID starting with cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067 not found: ID does not exist" containerID="cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067" Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.973872 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067"} err="failed to get container status \"cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067\": rpc error: code = NotFound desc = could not find container \"cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067\": container with ID starting with cbb6125a7dbfdda498bd27923738a37789f5cbb84ef0a1b05352a5926620e067 not found: ID does not exist" Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.973893 4941 scope.go:117] "RemoveContainer" containerID="174b59f5efe4dd299c5422c46fc3fc0ed0f12dc6218015f3bb3fa68448cfe790" Nov 30 07:29:13 crc kubenswrapper[4941]: E1130 07:29:13.974094 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"174b59f5efe4dd299c5422c46fc3fc0ed0f12dc6218015f3bb3fa68448cfe790\": container with ID starting with 174b59f5efe4dd299c5422c46fc3fc0ed0f12dc6218015f3bb3fa68448cfe790 not found: ID does not exist" containerID="174b59f5efe4dd299c5422c46fc3fc0ed0f12dc6218015f3bb3fa68448cfe790" Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.974122 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"174b59f5efe4dd299c5422c46fc3fc0ed0f12dc6218015f3bb3fa68448cfe790"} err="failed to get container status \"174b59f5efe4dd299c5422c46fc3fc0ed0f12dc6218015f3bb3fa68448cfe790\": rpc error: code = NotFound desc = could not find container \"174b59f5efe4dd299c5422c46fc3fc0ed0f12dc6218015f3bb3fa68448cfe790\": container with ID starting with 174b59f5efe4dd299c5422c46fc3fc0ed0f12dc6218015f3bb3fa68448cfe790 not found: ID does not exist" Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.974140 4941 scope.go:117] "RemoveContainer" 
containerID="24cf3ffea0ad604e457b43ea6809d614ebe8cd33c9bf8f3214147964643192a1" Nov 30 07:29:13 crc kubenswrapper[4941]: E1130 07:29:13.974513 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24cf3ffea0ad604e457b43ea6809d614ebe8cd33c9bf8f3214147964643192a1\": container with ID starting with 24cf3ffea0ad604e457b43ea6809d614ebe8cd33c9bf8f3214147964643192a1 not found: ID does not exist" containerID="24cf3ffea0ad604e457b43ea6809d614ebe8cd33c9bf8f3214147964643192a1" Nov 30 07:29:13 crc kubenswrapper[4941]: I1130 07:29:13.974581 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24cf3ffea0ad604e457b43ea6809d614ebe8cd33c9bf8f3214147964643192a1"} err="failed to get container status \"24cf3ffea0ad604e457b43ea6809d614ebe8cd33c9bf8f3214147964643192a1\": rpc error: code = NotFound desc = could not find container \"24cf3ffea0ad604e457b43ea6809d614ebe8cd33c9bf8f3214147964643192a1\": container with ID starting with 24cf3ffea0ad604e457b43ea6809d614ebe8cd33c9bf8f3214147964643192a1 not found: ID does not exist" Nov 30 07:29:14 crc kubenswrapper[4941]: I1130 07:29:14.002682 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80cfeeff-c189-4980-9f77-08ec4b485669-utilities\") pod \"80cfeeff-c189-4980-9f77-08ec4b485669\" (UID: \"80cfeeff-c189-4980-9f77-08ec4b485669\") " Nov 30 07:29:14 crc kubenswrapper[4941]: I1130 07:29:14.002916 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxbhd\" (UniqueName: \"kubernetes.io/projected/80cfeeff-c189-4980-9f77-08ec4b485669-kube-api-access-lxbhd\") pod \"80cfeeff-c189-4980-9f77-08ec4b485669\" (UID: \"80cfeeff-c189-4980-9f77-08ec4b485669\") " Nov 30 07:29:14 crc kubenswrapper[4941]: I1130 07:29:14.003014 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80cfeeff-c189-4980-9f77-08ec4b485669-catalog-content\") pod \"80cfeeff-c189-4980-9f77-08ec4b485669\" (UID: \"80cfeeff-c189-4980-9f77-08ec4b485669\") " Nov 30 07:29:14 crc kubenswrapper[4941]: I1130 07:29:14.003594 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80cfeeff-c189-4980-9f77-08ec4b485669-utilities" (OuterVolumeSpecName: "utilities") pod "80cfeeff-c189-4980-9f77-08ec4b485669" (UID: "80cfeeff-c189-4980-9f77-08ec4b485669"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:29:14 crc kubenswrapper[4941]: I1130 07:29:14.010752 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80cfeeff-c189-4980-9f77-08ec4b485669-kube-api-access-lxbhd" (OuterVolumeSpecName: "kube-api-access-lxbhd") pod "80cfeeff-c189-4980-9f77-08ec4b485669" (UID: "80cfeeff-c189-4980-9f77-08ec4b485669"). InnerVolumeSpecName "kube-api-access-lxbhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:29:14 crc kubenswrapper[4941]: I1130 07:29:14.061381 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80cfeeff-c189-4980-9f77-08ec4b485669-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "80cfeeff-c189-4980-9f77-08ec4b485669" (UID: "80cfeeff-c189-4980-9f77-08ec4b485669"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:29:14 crc kubenswrapper[4941]: I1130 07:29:14.105601 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80cfeeff-c189-4980-9f77-08ec4b485669-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:29:14 crc kubenswrapper[4941]: I1130 07:29:14.105654 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxbhd\" (UniqueName: \"kubernetes.io/projected/80cfeeff-c189-4980-9f77-08ec4b485669-kube-api-access-lxbhd\") on node \"crc\" DevicePath \"\"" Nov 30 07:29:14 crc kubenswrapper[4941]: I1130 07:29:14.105667 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80cfeeff-c189-4980-9f77-08ec4b485669-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:29:14 crc kubenswrapper[4941]: I1130 07:29:14.232114 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t27vk"] Nov 30 07:29:14 crc kubenswrapper[4941]: I1130 07:29:14.240575 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t27vk"] Nov 30 07:29:15 crc kubenswrapper[4941]: I1130 07:29:15.535986 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80cfeeff-c189-4980-9f77-08ec4b485669" path="/var/lib/kubelet/pods/80cfeeff-c189-4980-9f77-08ec4b485669/volumes" Nov 30 07:29:17 crc kubenswrapper[4941]: I1130 07:29:17.522673 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:29:17 crc kubenswrapper[4941]: E1130 07:29:17.523340 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:29:30 crc kubenswrapper[4941]: I1130 07:29:30.522451 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:29:30 crc kubenswrapper[4941]: E1130 07:29:30.523685 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:29:42 crc kubenswrapper[4941]: I1130 07:29:42.522385 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:29:42 crc kubenswrapper[4941]: E1130 07:29:42.524090 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:29:54 crc kubenswrapper[4941]: I1130 07:29:54.522185 4941 
Nov 30 07:29:54 crc kubenswrapper[4941]: I1130 07:29:54.522185 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778"
Nov 30 07:29:54 crc kubenswrapper[4941]: E1130 07:29:54.523028 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.191252 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp"]
Nov 30 07:30:00 crc kubenswrapper[4941]: E1130 07:30:00.193049 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80cfeeff-c189-4980-9f77-08ec4b485669" containerName="extract-content"
Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.193169 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="80cfeeff-c189-4980-9f77-08ec4b485669" containerName="extract-content"
Nov 30 07:30:00 crc kubenswrapper[4941]: E1130 07:30:00.193257 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80cfeeff-c189-4980-9f77-08ec4b485669" containerName="extract-utilities"
Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.193359 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="80cfeeff-c189-4980-9f77-08ec4b485669" containerName="extract-utilities"
Nov 30 07:30:00 crc kubenswrapper[4941]: E1130 07:30:00.193459 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80cfeeff-c189-4980-9f77-08ec4b485669" containerName="registry-server"
Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.193536 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="80cfeeff-c189-4980-9f77-08ec4b485669" containerName="registry-server"
Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.193779 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="80cfeeff-c189-4980-9f77-08ec4b485669" containerName="registry-server"
Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.194480 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp"
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.197089 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.197511 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.203678 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp"] Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.273961 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/415240af-6b19-4e94-b812-539d0b5c5cb4-secret-volume\") pod \"collect-profiles-29408130-9lpnp\" (UID: \"415240af-6b19-4e94-b812-539d0b5c5cb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.274269 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ddbh\" (UniqueName: \"kubernetes.io/projected/415240af-6b19-4e94-b812-539d0b5c5cb4-kube-api-access-8ddbh\") pod \"collect-profiles-29408130-9lpnp\" (UID: \"415240af-6b19-4e94-b812-539d0b5c5cb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.274318 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/415240af-6b19-4e94-b812-539d0b5c5cb4-config-volume\") pod \"collect-profiles-29408130-9lpnp\" (UID: \"415240af-6b19-4e94-b812-539d0b5c5cb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.375668 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/415240af-6b19-4e94-b812-539d0b5c5cb4-secret-volume\") pod \"collect-profiles-29408130-9lpnp\" (UID: \"415240af-6b19-4e94-b812-539d0b5c5cb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.375738 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ddbh\" (UniqueName: \"kubernetes.io/projected/415240af-6b19-4e94-b812-539d0b5c5cb4-kube-api-access-8ddbh\") pod \"collect-profiles-29408130-9lpnp\" (UID: \"415240af-6b19-4e94-b812-539d0b5c5cb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.375817 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/415240af-6b19-4e94-b812-539d0b5c5cb4-config-volume\") pod \"collect-profiles-29408130-9lpnp\" (UID: \"415240af-6b19-4e94-b812-539d0b5c5cb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.376605 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/415240af-6b19-4e94-b812-539d0b5c5cb4-config-volume\") pod 
\"collect-profiles-29408130-9lpnp\" (UID: \"415240af-6b19-4e94-b812-539d0b5c5cb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.388115 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/415240af-6b19-4e94-b812-539d0b5c5cb4-secret-volume\") pod \"collect-profiles-29408130-9lpnp\" (UID: \"415240af-6b19-4e94-b812-539d0b5c5cb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.392098 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ddbh\" (UniqueName: \"kubernetes.io/projected/415240af-6b19-4e94-b812-539d0b5c5cb4-kube-api-access-8ddbh\") pod \"collect-profiles-29408130-9lpnp\" (UID: \"415240af-6b19-4e94-b812-539d0b5c5cb4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.516716 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:00 crc kubenswrapper[4941]: I1130 07:30:00.702166 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp"] Nov 30 07:30:01 crc kubenswrapper[4941]: E1130 07:30:01.050120 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod415240af_6b19_4e94_b812_539d0b5c5cb4.slice/crio-conmon-fc50957275e6897af789e5368e4507822ada6ff6d712ee4209a191fc3712f021.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod415240af_6b19_4e94_b812_539d0b5c5cb4.slice/crio-fc50957275e6897af789e5368e4507822ada6ff6d712ee4209a191fc3712f021.scope\": RecentStats: unable to find data in memory cache]" Nov 30 07:30:01 crc kubenswrapper[4941]: I1130 07:30:01.329451 4941 generic.go:334] "Generic (PLEG): container finished" podID="415240af-6b19-4e94-b812-539d0b5c5cb4" containerID="fc50957275e6897af789e5368e4507822ada6ff6d712ee4209a191fc3712f021" exitCode=0 Nov 30 07:30:01 crc kubenswrapper[4941]: I1130 07:30:01.329508 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" event={"ID":"415240af-6b19-4e94-b812-539d0b5c5cb4","Type":"ContainerDied","Data":"fc50957275e6897af789e5368e4507822ada6ff6d712ee4209a191fc3712f021"} Nov 30 07:30:01 crc kubenswrapper[4941]: I1130 07:30:01.329795 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" event={"ID":"415240af-6b19-4e94-b812-539d0b5c5cb4","Type":"ContainerStarted","Data":"d0293d43d5b6a06d0a5f0709eae5f1909520dd2e09e857427b335975692b2a4c"} Nov 30 07:30:02 crc kubenswrapper[4941]: I1130 07:30:02.622487 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:02 crc kubenswrapper[4941]: I1130 07:30:02.706857 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/415240af-6b19-4e94-b812-539d0b5c5cb4-secret-volume\") pod \"415240af-6b19-4e94-b812-539d0b5c5cb4\" (UID: \"415240af-6b19-4e94-b812-539d0b5c5cb4\") " Nov 30 07:30:02 crc kubenswrapper[4941]: I1130 07:30:02.706970 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/415240af-6b19-4e94-b812-539d0b5c5cb4-config-volume\") pod \"415240af-6b19-4e94-b812-539d0b5c5cb4\" (UID: \"415240af-6b19-4e94-b812-539d0b5c5cb4\") " Nov 30 07:30:02 crc kubenswrapper[4941]: I1130 07:30:02.707016 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ddbh\" (UniqueName: \"kubernetes.io/projected/415240af-6b19-4e94-b812-539d0b5c5cb4-kube-api-access-8ddbh\") pod \"415240af-6b19-4e94-b812-539d0b5c5cb4\" (UID: \"415240af-6b19-4e94-b812-539d0b5c5cb4\") " Nov 30 07:30:02 crc kubenswrapper[4941]: I1130 07:30:02.708584 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/415240af-6b19-4e94-b812-539d0b5c5cb4-config-volume" (OuterVolumeSpecName: "config-volume") pod "415240af-6b19-4e94-b812-539d0b5c5cb4" (UID: "415240af-6b19-4e94-b812-539d0b5c5cb4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:30:02 crc kubenswrapper[4941]: I1130 07:30:02.715077 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/415240af-6b19-4e94-b812-539d0b5c5cb4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "415240af-6b19-4e94-b812-539d0b5c5cb4" (UID: "415240af-6b19-4e94-b812-539d0b5c5cb4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 07:30:02 crc kubenswrapper[4941]: I1130 07:30:02.715276 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/415240af-6b19-4e94-b812-539d0b5c5cb4-kube-api-access-8ddbh" (OuterVolumeSpecName: "kube-api-access-8ddbh") pod "415240af-6b19-4e94-b812-539d0b5c5cb4" (UID: "415240af-6b19-4e94-b812-539d0b5c5cb4"). InnerVolumeSpecName "kube-api-access-8ddbh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:30:02 crc kubenswrapper[4941]: I1130 07:30:02.808782 4941 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/415240af-6b19-4e94-b812-539d0b5c5cb4-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 30 07:30:02 crc kubenswrapper[4941]: I1130 07:30:02.809023 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/415240af-6b19-4e94-b812-539d0b5c5cb4-config-volume\") on node \"crc\" DevicePath \"\"" Nov 30 07:30:02 crc kubenswrapper[4941]: I1130 07:30:02.809034 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ddbh\" (UniqueName: \"kubernetes.io/projected/415240af-6b19-4e94-b812-539d0b5c5cb4-kube-api-access-8ddbh\") on node \"crc\" DevicePath \"\"" Nov 30 07:30:03 crc kubenswrapper[4941]: I1130 07:30:03.344502 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" event={"ID":"415240af-6b19-4e94-b812-539d0b5c5cb4","Type":"ContainerDied","Data":"d0293d43d5b6a06d0a5f0709eae5f1909520dd2e09e857427b335975692b2a4c"} Nov 30 07:30:03 crc kubenswrapper[4941]: I1130 07:30:03.344541 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0293d43d5b6a06d0a5f0709eae5f1909520dd2e09e857427b335975692b2a4c" Nov 30 07:30:03 crc kubenswrapper[4941]: I1130 07:30:03.344626 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp" Nov 30 07:30:03 crc kubenswrapper[4941]: I1130 07:30:03.693818 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j"] Nov 30 07:30:03 crc kubenswrapper[4941]: I1130 07:30:03.698192 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408085-x6l8j"] Nov 30 07:30:05 crc kubenswrapper[4941]: I1130 07:30:05.532750 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="773d34d4-1723-42c4-8b83-43f629b630c2" path="/var/lib/kubelet/pods/773d34d4-1723-42c4-8b83-43f629b630c2/volumes" Nov 30 07:30:09 crc kubenswrapper[4941]: I1130 07:30:09.530023 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:30:09 crc kubenswrapper[4941]: E1130 07:30:09.530804 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:30:22 crc kubenswrapper[4941]: I1130 07:30:22.521689 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:30:22 crc kubenswrapper[4941]: E1130 07:30:22.522551 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:30:34 crc kubenswrapper[4941]: I1130 07:30:34.523092 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:30:34 crc kubenswrapper[4941]: E1130 07:30:34.524602 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:30:38 crc kubenswrapper[4941]: I1130 07:30:38.933856 4941 scope.go:117] "RemoveContainer" containerID="cb07af51b3105ae67f0fe232c46fd6912a357c344881a250b64edafdf6ae76ab" Nov 30 07:30:39 crc kubenswrapper[4941]: I1130 07:30:39.948454 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8n7wr"] Nov 30 07:30:39 crc kubenswrapper[4941]: E1130 07:30:39.949279 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="415240af-6b19-4e94-b812-539d0b5c5cb4" containerName="collect-profiles" Nov 30 07:30:39 crc kubenswrapper[4941]: I1130 07:30:39.949296 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="415240af-6b19-4e94-b812-539d0b5c5cb4" containerName="collect-profiles" Nov 30 07:30:39 crc kubenswrapper[4941]: I1130 07:30:39.949549 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="415240af-6b19-4e94-b812-539d0b5c5cb4" containerName="collect-profiles" Nov 30 07:30:39 crc kubenswrapper[4941]: I1130 07:30:39.951091 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:30:39 crc kubenswrapper[4941]: I1130 07:30:39.962631 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8n7wr"] Nov 30 07:30:40 crc kubenswrapper[4941]: I1130 07:30:40.049750 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrfzp\" (UniqueName: \"kubernetes.io/projected/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-kube-api-access-lrfzp\") pod \"redhat-operators-8n7wr\" (UID: \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\") " pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:30:40 crc kubenswrapper[4941]: I1130 07:30:40.049848 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-catalog-content\") pod \"redhat-operators-8n7wr\" (UID: \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\") " pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:30:40 crc kubenswrapper[4941]: I1130 07:30:40.049979 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-utilities\") pod \"redhat-operators-8n7wr\" (UID: \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\") " pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:30:40 crc kubenswrapper[4941]: I1130 07:30:40.152186 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-catalog-content\") pod \"redhat-operators-8n7wr\" (UID: \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\") " pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:30:40 crc kubenswrapper[4941]: I1130 07:30:40.152254 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-utilities\") pod \"redhat-operators-8n7wr\" (UID: \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\") " pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:30:40 crc kubenswrapper[4941]: I1130 07:30:40.152371 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrfzp\" (UniqueName: \"kubernetes.io/projected/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-kube-api-access-lrfzp\") pod \"redhat-operators-8n7wr\" (UID: \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\") " pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:30:40 crc kubenswrapper[4941]: I1130 07:30:40.152959 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-catalog-content\") pod \"redhat-operators-8n7wr\" (UID: \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\") " pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:30:40 crc kubenswrapper[4941]: I1130 07:30:40.153048 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-utilities\") pod \"redhat-operators-8n7wr\" (UID: \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\") " pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:30:40 crc kubenswrapper[4941]: I1130 07:30:40.179933 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-lrfzp\" (UniqueName: \"kubernetes.io/projected/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-kube-api-access-lrfzp\") pod \"redhat-operators-8n7wr\" (UID: \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\") " pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:30:40 crc kubenswrapper[4941]: I1130 07:30:40.272609 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:30:40 crc kubenswrapper[4941]: I1130 07:30:40.724736 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8n7wr"] Nov 30 07:30:41 crc kubenswrapper[4941]: I1130 07:30:41.707863 4941 generic.go:334] "Generic (PLEG): container finished" podID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" containerID="4f7d90d4f5aa367044d26e789e7d75710995d2f453569db587f0d02aafff4982" exitCode=0 Nov 30 07:30:41 crc kubenswrapper[4941]: I1130 07:30:41.707983 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8n7wr" event={"ID":"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc","Type":"ContainerDied","Data":"4f7d90d4f5aa367044d26e789e7d75710995d2f453569db587f0d02aafff4982"} Nov 30 07:30:41 crc kubenswrapper[4941]: I1130 07:30:41.708515 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8n7wr" event={"ID":"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc","Type":"ContainerStarted","Data":"91ee70d9e5a98a0c63c218bbab983b400c010e0ef734477ca04e34f6183bf02d"} Nov 30 07:30:42 crc kubenswrapper[4941]: I1130 07:30:42.737027 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8n7wr" event={"ID":"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc","Type":"ContainerStarted","Data":"f916928542c2a97e627553f7ca901bbbfc22e81a27e8920a103aae9cb7cfe6a8"} Nov 30 07:30:43 crc kubenswrapper[4941]: I1130 07:30:43.750731 4941 generic.go:334] "Generic (PLEG): container finished" podID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" containerID="f916928542c2a97e627553f7ca901bbbfc22e81a27e8920a103aae9cb7cfe6a8" exitCode=0 Nov 30 07:30:43 crc kubenswrapper[4941]: I1130 07:30:43.750847 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8n7wr" event={"ID":"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc","Type":"ContainerDied","Data":"f916928542c2a97e627553f7ca901bbbfc22e81a27e8920a103aae9cb7cfe6a8"} Nov 30 07:30:44 crc kubenswrapper[4941]: I1130 07:30:44.769706 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8n7wr" event={"ID":"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc","Type":"ContainerStarted","Data":"2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0"} Nov 30 07:30:44 crc kubenswrapper[4941]: I1130 07:30:44.806539 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8n7wr" podStartSLOduration=3.311888887 podStartE2EDuration="5.80652133s" podCreationTimestamp="2025-11-30 07:30:39 +0000 UTC" firstStartedPulling="2025-11-30 07:30:41.711793027 +0000 UTC m=+2662.479964636" lastFinishedPulling="2025-11-30 07:30:44.20642544 +0000 UTC m=+2664.974597079" observedRunningTime="2025-11-30 07:30:44.805970822 +0000 UTC m=+2665.574142481" watchObservedRunningTime="2025-11-30 07:30:44.80652133 +0000 UTC m=+2665.574692939" Nov 30 07:30:46 crc kubenswrapper[4941]: I1130 07:30:46.522875 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:30:46 
crc kubenswrapper[4941]: E1130 07:30:46.523485 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:30:50 crc kubenswrapper[4941]: I1130 07:30:50.272805 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8n7wr"
Nov 30 07:30:50 crc kubenswrapper[4941]: I1130 07:30:50.273852 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8n7wr"
Nov 30 07:30:51 crc kubenswrapper[4941]: I1130 07:30:51.351432 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8n7wr" podUID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" containerName="registry-server" probeResult="failure" output=<
Nov 30 07:30:51 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s
Nov 30 07:30:51 crc kubenswrapper[4941]: >
Nov 30 07:31:00 crc kubenswrapper[4941]: I1130 07:31:00.339705 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8n7wr"
Nov 30 07:31:00 crc kubenswrapper[4941]: I1130 07:31:00.413375 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8n7wr"
Nov 30 07:31:00 crc kubenswrapper[4941]: I1130 07:31:00.521920 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778"
Nov 30 07:31:00 crc kubenswrapper[4941]: E1130 07:31:00.522545 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:31:00 crc kubenswrapper[4941]: I1130 07:31:00.587116 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8n7wr"]
Nov 30 07:31:01 crc kubenswrapper[4941]: I1130 07:31:01.932586 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8n7wr" podUID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" containerName="registry-server" containerID="cri-o://2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0" gracePeriod=2
Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.356377 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8n7wr"
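The failed Startup probe above, with its "timeout: failed to connect service \":50051\" within 1s" output, matches the message format of grpc_health_probe, the check OLM catalog pods run against the registry-server gRPC port; at 07:31:00 the startup probe flips to started and the readiness probe to ready once the server is listening. A rough stand-in for that check, an approximation only (it tests TCP reachability rather than gRPC health, and the target address is an assumption):

package main

import (
	"fmt"
	"net"
	"os"
	"time"
)

// probe mimics the 1s budget seen in the log: dial the registry port and
// fail if the connection cannot be established in time.
func probe(addr string) error {
	conn, err := net.DialTimeout("tcp", addr, time.Second)
	if err != nil {
		return fmt.Errorf("timeout: failed to connect service %q within 1s: %w", addr, err)
	}
	return conn.Close()
}

func main() {
	if err := probe("localhost:50051"); err != nil { // port from the log; host assumed
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1) // a non-zero exit is what marks the kubelet probe as failed
	}
	fmt.Println("ok")
}

In the pod spec this would correspond to a startupProbe and readinessProbe exec'ing something like grpc_health_probe -addr=:50051; the exact flag is an assumption, the log only shows the probe's output.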
Need to start a new one" pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.484040 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-catalog-content\") pod \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\" (UID: \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\") " Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.484083 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrfzp\" (UniqueName: \"kubernetes.io/projected/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-kube-api-access-lrfzp\") pod \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\" (UID: \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\") " Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.484164 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-utilities\") pod \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\" (UID: \"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc\") " Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.485564 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-utilities" (OuterVolumeSpecName: "utilities") pod "7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" (UID: "7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.489745 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-kube-api-access-lrfzp" (OuterVolumeSpecName: "kube-api-access-lrfzp") pod "7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" (UID: "7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc"). InnerVolumeSpecName "kube-api-access-lrfzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.585508 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrfzp\" (UniqueName: \"kubernetes.io/projected/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-kube-api-access-lrfzp\") on node \"crc\" DevicePath \"\"" Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.585552 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.637574 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" (UID: "7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.687108 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.941626 4941 generic.go:334] "Generic (PLEG): container finished" podID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" containerID="2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0" exitCode=0 Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.941677 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8n7wr" event={"ID":"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc","Type":"ContainerDied","Data":"2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0"} Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.941716 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8n7wr" event={"ID":"7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc","Type":"ContainerDied","Data":"91ee70d9e5a98a0c63c218bbab983b400c010e0ef734477ca04e34f6183bf02d"} Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.941738 4941 scope.go:117] "RemoveContainer" containerID="2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0" Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.941732 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8n7wr" Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.963295 4941 scope.go:117] "RemoveContainer" containerID="f916928542c2a97e627553f7ca901bbbfc22e81a27e8920a103aae9cb7cfe6a8" Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.987515 4941 scope.go:117] "RemoveContainer" containerID="4f7d90d4f5aa367044d26e789e7d75710995d2f453569db587f0d02aafff4982" Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.989176 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8n7wr"] Nov 30 07:31:02 crc kubenswrapper[4941]: I1130 07:31:02.993975 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8n7wr"] Nov 30 07:31:03 crc kubenswrapper[4941]: I1130 07:31:03.012253 4941 scope.go:117] "RemoveContainer" containerID="2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0" Nov 30 07:31:03 crc kubenswrapper[4941]: E1130 07:31:03.012832 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0\": container with ID starting with 2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0 not found: ID does not exist" containerID="2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0" Nov 30 07:31:03 crc kubenswrapper[4941]: I1130 07:31:03.012935 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0"} err="failed to get container status \"2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0\": rpc error: code = NotFound desc = could not find container \"2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0\": container with ID starting with 2505777f56644ffe859833af96d842b85f71d718613fd6ba9077a12bb74ed2b0 not found: ID does not exist" Nov 30 07:31:03 crc 
kubenswrapper[4941]: I1130 07:31:03.013025 4941 scope.go:117] "RemoveContainer" containerID="f916928542c2a97e627553f7ca901bbbfc22e81a27e8920a103aae9cb7cfe6a8" Nov 30 07:31:03 crc kubenswrapper[4941]: E1130 07:31:03.013441 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f916928542c2a97e627553f7ca901bbbfc22e81a27e8920a103aae9cb7cfe6a8\": container with ID starting with f916928542c2a97e627553f7ca901bbbfc22e81a27e8920a103aae9cb7cfe6a8 not found: ID does not exist" containerID="f916928542c2a97e627553f7ca901bbbfc22e81a27e8920a103aae9cb7cfe6a8" Nov 30 07:31:03 crc kubenswrapper[4941]: I1130 07:31:03.013504 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f916928542c2a97e627553f7ca901bbbfc22e81a27e8920a103aae9cb7cfe6a8"} err="failed to get container status \"f916928542c2a97e627553f7ca901bbbfc22e81a27e8920a103aae9cb7cfe6a8\": rpc error: code = NotFound desc = could not find container \"f916928542c2a97e627553f7ca901bbbfc22e81a27e8920a103aae9cb7cfe6a8\": container with ID starting with f916928542c2a97e627553f7ca901bbbfc22e81a27e8920a103aae9cb7cfe6a8 not found: ID does not exist" Nov 30 07:31:03 crc kubenswrapper[4941]: I1130 07:31:03.013547 4941 scope.go:117] "RemoveContainer" containerID="4f7d90d4f5aa367044d26e789e7d75710995d2f453569db587f0d02aafff4982" Nov 30 07:31:03 crc kubenswrapper[4941]: E1130 07:31:03.013919 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f7d90d4f5aa367044d26e789e7d75710995d2f453569db587f0d02aafff4982\": container with ID starting with 4f7d90d4f5aa367044d26e789e7d75710995d2f453569db587f0d02aafff4982 not found: ID does not exist" containerID="4f7d90d4f5aa367044d26e789e7d75710995d2f453569db587f0d02aafff4982" Nov 30 07:31:03 crc kubenswrapper[4941]: I1130 07:31:03.014025 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f7d90d4f5aa367044d26e789e7d75710995d2f453569db587f0d02aafff4982"} err="failed to get container status \"4f7d90d4f5aa367044d26e789e7d75710995d2f453569db587f0d02aafff4982\": rpc error: code = NotFound desc = could not find container \"4f7d90d4f5aa367044d26e789e7d75710995d2f453569db587f0d02aafff4982\": container with ID starting with 4f7d90d4f5aa367044d26e789e7d75710995d2f453569db587f0d02aafff4982 not found: ID does not exist" Nov 30 07:31:03 crc kubenswrapper[4941]: I1130 07:31:03.535185 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" path="/var/lib/kubelet/pods/7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc/volumes" Nov 30 07:31:15 crc kubenswrapper[4941]: I1130 07:31:15.522372 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:31:15 crc kubenswrapper[4941]: E1130 07:31:15.523540 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.390367 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8pt26"] Nov 30 
07:31:18 crc kubenswrapper[4941]: E1130 07:31:18.391302 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" containerName="extract-content" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.391349 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" containerName="extract-content" Nov 30 07:31:18 crc kubenswrapper[4941]: E1130 07:31:18.391425 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" containerName="registry-server" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.391439 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" containerName="registry-server" Nov 30 07:31:18 crc kubenswrapper[4941]: E1130 07:31:18.391461 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" containerName="extract-utilities" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.391475 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" containerName="extract-utilities" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.391730 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dac2e9a-8a4b-4f35-bb7e-f1bfc52c91bc" containerName="registry-server" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.393773 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.398447 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8pt26"] Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.508191 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l8q5\" (UniqueName: \"kubernetes.io/projected/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-kube-api-access-8l8q5\") pod \"redhat-marketplace-8pt26\" (UID: \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\") " pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.508465 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-utilities\") pod \"redhat-marketplace-8pt26\" (UID: \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\") " pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.508559 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-catalog-content\") pod \"redhat-marketplace-8pt26\" (UID: \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\") " pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.609901 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l8q5\" (UniqueName: \"kubernetes.io/projected/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-kube-api-access-8l8q5\") pod \"redhat-marketplace-8pt26\" (UID: \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\") " pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.610061 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-utilities\") pod \"redhat-marketplace-8pt26\" (UID: \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\") " pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.610116 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-catalog-content\") pod \"redhat-marketplace-8pt26\" (UID: \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\") " pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.610672 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-utilities\") pod \"redhat-marketplace-8pt26\" (UID: \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\") " pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.610806 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-catalog-content\") pod \"redhat-marketplace-8pt26\" (UID: \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\") " pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.630842 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l8q5\" (UniqueName: \"kubernetes.io/projected/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-kube-api-access-8l8q5\") pod \"redhat-marketplace-8pt26\" (UID: \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\") " pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:18 crc kubenswrapper[4941]: I1130 07:31:18.734668 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:19 crc kubenswrapper[4941]: I1130 07:31:19.162004 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8pt26"] Nov 30 07:31:20 crc kubenswrapper[4941]: I1130 07:31:20.099076 4941 generic.go:334] "Generic (PLEG): container finished" podID="39bceb3a-b2d1-4741-8d63-b1a85408fcc9" containerID="777765d2a73fe667584d0983b14aa35434b24af5a3517b41aa4a9802ce4cfb7a" exitCode=0 Nov 30 07:31:20 crc kubenswrapper[4941]: I1130 07:31:20.099198 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pt26" event={"ID":"39bceb3a-b2d1-4741-8d63-b1a85408fcc9","Type":"ContainerDied","Data":"777765d2a73fe667584d0983b14aa35434b24af5a3517b41aa4a9802ce4cfb7a"} Nov 30 07:31:20 crc kubenswrapper[4941]: I1130 07:31:20.099414 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pt26" event={"ID":"39bceb3a-b2d1-4741-8d63-b1a85408fcc9","Type":"ContainerStarted","Data":"f1308896c0b6ede8fd0914bea12d7d524bdd6aff3d9f0083ccd1f7ead3adaed4"} Nov 30 07:31:21 crc kubenswrapper[4941]: I1130 07:31:21.108486 4941 generic.go:334] "Generic (PLEG): container finished" podID="39bceb3a-b2d1-4741-8d63-b1a85408fcc9" containerID="66ac5db0f874ed25890aa48444a8751aa99d8e4aad765438958b08e63ad347d0" exitCode=0 Nov 30 07:31:21 crc kubenswrapper[4941]: I1130 07:31:21.108597 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pt26" event={"ID":"39bceb3a-b2d1-4741-8d63-b1a85408fcc9","Type":"ContainerDied","Data":"66ac5db0f874ed25890aa48444a8751aa99d8e4aad765438958b08e63ad347d0"} Nov 30 07:31:22 crc kubenswrapper[4941]: I1130 07:31:22.122248 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pt26" event={"ID":"39bceb3a-b2d1-4741-8d63-b1a85408fcc9","Type":"ContainerStarted","Data":"e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4"} Nov 30 07:31:22 crc kubenswrapper[4941]: I1130 07:31:22.156695 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8pt26" podStartSLOduration=2.724659 podStartE2EDuration="4.156671252s" podCreationTimestamp="2025-11-30 07:31:18 +0000 UTC" firstStartedPulling="2025-11-30 07:31:20.102779659 +0000 UTC m=+2700.870951298" lastFinishedPulling="2025-11-30 07:31:21.534791941 +0000 UTC m=+2702.302963550" observedRunningTime="2025-11-30 07:31:22.155030021 +0000 UTC m=+2702.923201640" watchObservedRunningTime="2025-11-30 07:31:22.156671252 +0000 UTC m=+2702.924842871" Nov 30 07:31:28 crc kubenswrapper[4941]: I1130 07:31:28.734824 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:28 crc kubenswrapper[4941]: I1130 07:31:28.735796 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:28 crc kubenswrapper[4941]: I1130 07:31:28.786706 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:29 crc kubenswrapper[4941]: I1130 07:31:29.269442 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:30 crc kubenswrapper[4941]: I1130 07:31:30.170032 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-8pt26"] Nov 30 07:31:30 crc kubenswrapper[4941]: I1130 07:31:30.522569 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:31:30 crc kubenswrapper[4941]: E1130 07:31:30.522882 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:31:31 crc kubenswrapper[4941]: I1130 07:31:31.226320 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8pt26" podUID="39bceb3a-b2d1-4741-8d63-b1a85408fcc9" containerName="registry-server" containerID="cri-o://e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4" gracePeriod=2 Nov 30 07:31:31 crc kubenswrapper[4941]: I1130 07:31:31.706756 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:31 crc kubenswrapper[4941]: I1130 07:31:31.830501 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-utilities\") pod \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\" (UID: \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\") " Nov 30 07:31:31 crc kubenswrapper[4941]: I1130 07:31:31.830557 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-catalog-content\") pod \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\" (UID: \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\") " Nov 30 07:31:31 crc kubenswrapper[4941]: I1130 07:31:31.830596 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8l8q5\" (UniqueName: \"kubernetes.io/projected/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-kube-api-access-8l8q5\") pod \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\" (UID: \"39bceb3a-b2d1-4741-8d63-b1a85408fcc9\") " Nov 30 07:31:31 crc kubenswrapper[4941]: I1130 07:31:31.832277 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-utilities" (OuterVolumeSpecName: "utilities") pod "39bceb3a-b2d1-4741-8d63-b1a85408fcc9" (UID: "39bceb3a-b2d1-4741-8d63-b1a85408fcc9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:31:31 crc kubenswrapper[4941]: I1130 07:31:31.840825 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-kube-api-access-8l8q5" (OuterVolumeSpecName: "kube-api-access-8l8q5") pod "39bceb3a-b2d1-4741-8d63-b1a85408fcc9" (UID: "39bceb3a-b2d1-4741-8d63-b1a85408fcc9"). InnerVolumeSpecName "kube-api-access-8l8q5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:31:31 crc kubenswrapper[4941]: I1130 07:31:31.854970 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39bceb3a-b2d1-4741-8d63-b1a85408fcc9" (UID: "39bceb3a-b2d1-4741-8d63-b1a85408fcc9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:31:31 crc kubenswrapper[4941]: I1130 07:31:31.932503 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:31:31 crc kubenswrapper[4941]: I1130 07:31:31.933038 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:31:31 crc kubenswrapper[4941]: I1130 07:31:31.933059 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8l8q5\" (UniqueName: \"kubernetes.io/projected/39bceb3a-b2d1-4741-8d63-b1a85408fcc9-kube-api-access-8l8q5\") on node \"crc\" DevicePath \"\"" Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.245737 4941 generic.go:334] "Generic (PLEG): container finished" podID="39bceb3a-b2d1-4741-8d63-b1a85408fcc9" containerID="e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4" exitCode=0 Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.245892 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pt26" event={"ID":"39bceb3a-b2d1-4741-8d63-b1a85408fcc9","Type":"ContainerDied","Data":"e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4"} Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.247521 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pt26" event={"ID":"39bceb3a-b2d1-4741-8d63-b1a85408fcc9","Type":"ContainerDied","Data":"f1308896c0b6ede8fd0914bea12d7d524bdd6aff3d9f0083ccd1f7ead3adaed4"} Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.247559 4941 scope.go:117] "RemoveContainer" containerID="e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4" Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.245978 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8pt26" Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.289194 4941 scope.go:117] "RemoveContainer" containerID="66ac5db0f874ed25890aa48444a8751aa99d8e4aad765438958b08e63ad347d0" Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.314667 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8pt26"] Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.337950 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8pt26"] Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.339843 4941 scope.go:117] "RemoveContainer" containerID="777765d2a73fe667584d0983b14aa35434b24af5a3517b41aa4a9802ce4cfb7a" Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.365593 4941 scope.go:117] "RemoveContainer" containerID="e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4" Nov 30 07:31:32 crc kubenswrapper[4941]: E1130 07:31:32.366079 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4\": container with ID starting with e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4 not found: ID does not exist" containerID="e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4" Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.366115 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4"} err="failed to get container status \"e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4\": rpc error: code = NotFound desc = could not find container \"e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4\": container with ID starting with e5c76719cc302e34e8d550b766fdc436de66f642bcb1210a74b3e132b560f0b4 not found: ID does not exist" Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.366142 4941 scope.go:117] "RemoveContainer" containerID="66ac5db0f874ed25890aa48444a8751aa99d8e4aad765438958b08e63ad347d0" Nov 30 07:31:32 crc kubenswrapper[4941]: E1130 07:31:32.366683 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66ac5db0f874ed25890aa48444a8751aa99d8e4aad765438958b08e63ad347d0\": container with ID starting with 66ac5db0f874ed25890aa48444a8751aa99d8e4aad765438958b08e63ad347d0 not found: ID does not exist" containerID="66ac5db0f874ed25890aa48444a8751aa99d8e4aad765438958b08e63ad347d0" Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.366818 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66ac5db0f874ed25890aa48444a8751aa99d8e4aad765438958b08e63ad347d0"} err="failed to get container status \"66ac5db0f874ed25890aa48444a8751aa99d8e4aad765438958b08e63ad347d0\": rpc error: code = NotFound desc = could not find container \"66ac5db0f874ed25890aa48444a8751aa99d8e4aad765438958b08e63ad347d0\": container with ID starting with 66ac5db0f874ed25890aa48444a8751aa99d8e4aad765438958b08e63ad347d0 not found: ID does not exist" Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.366929 4941 scope.go:117] "RemoveContainer" containerID="777765d2a73fe667584d0983b14aa35434b24af5a3517b41aa4a9802ce4cfb7a" Nov 30 07:31:32 crc kubenswrapper[4941]: E1130 07:31:32.367588 4941 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"777765d2a73fe667584d0983b14aa35434b24af5a3517b41aa4a9802ce4cfb7a\": container with ID starting with 777765d2a73fe667584d0983b14aa35434b24af5a3517b41aa4a9802ce4cfb7a not found: ID does not exist" containerID="777765d2a73fe667584d0983b14aa35434b24af5a3517b41aa4a9802ce4cfb7a" Nov 30 07:31:32 crc kubenswrapper[4941]: I1130 07:31:32.367623 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"777765d2a73fe667584d0983b14aa35434b24af5a3517b41aa4a9802ce4cfb7a"} err="failed to get container status \"777765d2a73fe667584d0983b14aa35434b24af5a3517b41aa4a9802ce4cfb7a\": rpc error: code = NotFound desc = could not find container \"777765d2a73fe667584d0983b14aa35434b24af5a3517b41aa4a9802ce4cfb7a\": container with ID starting with 777765d2a73fe667584d0983b14aa35434b24af5a3517b41aa4a9802ce4cfb7a not found: ID does not exist" Nov 30 07:31:33 crc kubenswrapper[4941]: I1130 07:31:33.539774 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39bceb3a-b2d1-4741-8d63-b1a85408fcc9" path="/var/lib/kubelet/pods/39bceb3a-b2d1-4741-8d63-b1a85408fcc9/volumes" Nov 30 07:31:44 crc kubenswrapper[4941]: I1130 07:31:44.522679 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:31:45 crc kubenswrapper[4941]: I1130 07:31:45.383018 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"ee5bbfe76db79d6ca76ed246717ee694e1e3041d29a58f0aefbda086ca91d160"} Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.118711 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sx66t"] Nov 30 07:32:34 crc kubenswrapper[4941]: E1130 07:32:34.120535 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39bceb3a-b2d1-4741-8d63-b1a85408fcc9" containerName="registry-server" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.120643 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="39bceb3a-b2d1-4741-8d63-b1a85408fcc9" containerName="registry-server" Nov 30 07:32:34 crc kubenswrapper[4941]: E1130 07:32:34.120744 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39bceb3a-b2d1-4741-8d63-b1a85408fcc9" containerName="extract-content" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.120836 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="39bceb3a-b2d1-4741-8d63-b1a85408fcc9" containerName="extract-content" Nov 30 07:32:34 crc kubenswrapper[4941]: E1130 07:32:34.120915 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39bceb3a-b2d1-4741-8d63-b1a85408fcc9" containerName="extract-utilities" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.120997 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="39bceb3a-b2d1-4741-8d63-b1a85408fcc9" containerName="extract-utilities" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.121289 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="39bceb3a-b2d1-4741-8d63-b1a85408fcc9" containerName="registry-server" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.123826 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.132734 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sx66t"] Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.248339 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f448deec-d51f-4448-8ff6-95f1177f3c19-utilities\") pod \"community-operators-sx66t\" (UID: \"f448deec-d51f-4448-8ff6-95f1177f3c19\") " pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.248399 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f448deec-d51f-4448-8ff6-95f1177f3c19-catalog-content\") pod \"community-operators-sx66t\" (UID: \"f448deec-d51f-4448-8ff6-95f1177f3c19\") " pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.248435 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pskr7\" (UniqueName: \"kubernetes.io/projected/f448deec-d51f-4448-8ff6-95f1177f3c19-kube-api-access-pskr7\") pod \"community-operators-sx66t\" (UID: \"f448deec-d51f-4448-8ff6-95f1177f3c19\") " pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.349861 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f448deec-d51f-4448-8ff6-95f1177f3c19-utilities\") pod \"community-operators-sx66t\" (UID: \"f448deec-d51f-4448-8ff6-95f1177f3c19\") " pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.349944 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f448deec-d51f-4448-8ff6-95f1177f3c19-catalog-content\") pod \"community-operators-sx66t\" (UID: \"f448deec-d51f-4448-8ff6-95f1177f3c19\") " pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.349981 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pskr7\" (UniqueName: \"kubernetes.io/projected/f448deec-d51f-4448-8ff6-95f1177f3c19-kube-api-access-pskr7\") pod \"community-operators-sx66t\" (UID: \"f448deec-d51f-4448-8ff6-95f1177f3c19\") " pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.350439 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f448deec-d51f-4448-8ff6-95f1177f3c19-utilities\") pod \"community-operators-sx66t\" (UID: \"f448deec-d51f-4448-8ff6-95f1177f3c19\") " pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.350813 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f448deec-d51f-4448-8ff6-95f1177f3c19-catalog-content\") pod \"community-operators-sx66t\" (UID: \"f448deec-d51f-4448-8ff6-95f1177f3c19\") " pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.382122 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pskr7\" (UniqueName: \"kubernetes.io/projected/f448deec-d51f-4448-8ff6-95f1177f3c19-kube-api-access-pskr7\") pod \"community-operators-sx66t\" (UID: \"f448deec-d51f-4448-8ff6-95f1177f3c19\") " pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.454051 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:34 crc kubenswrapper[4941]: I1130 07:32:34.984255 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sx66t"] Nov 30 07:32:35 crc kubenswrapper[4941]: I1130 07:32:35.837238 4941 generic.go:334] "Generic (PLEG): container finished" podID="f448deec-d51f-4448-8ff6-95f1177f3c19" containerID="b495fa4900333e102223f0c0a6bc30425865c8a708b3936cac5a6be4f79da87d" exitCode=0 Nov 30 07:32:35 crc kubenswrapper[4941]: I1130 07:32:35.837409 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sx66t" event={"ID":"f448deec-d51f-4448-8ff6-95f1177f3c19","Type":"ContainerDied","Data":"b495fa4900333e102223f0c0a6bc30425865c8a708b3936cac5a6be4f79da87d"} Nov 30 07:32:35 crc kubenswrapper[4941]: I1130 07:32:35.837579 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sx66t" event={"ID":"f448deec-d51f-4448-8ff6-95f1177f3c19","Type":"ContainerStarted","Data":"f1ccd14aa2bbad61bea3470a6c71fd485d79d0c629ba1cf6b79de414d3751514"} Nov 30 07:32:40 crc kubenswrapper[4941]: I1130 07:32:40.901468 4941 generic.go:334] "Generic (PLEG): container finished" podID="f448deec-d51f-4448-8ff6-95f1177f3c19" containerID="0d4a2da23c3760129c3b6961a9690962a14f67453fc0f022cde58f0439ca8720" exitCode=0 Nov 30 07:32:40 crc kubenswrapper[4941]: I1130 07:32:40.901565 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sx66t" event={"ID":"f448deec-d51f-4448-8ff6-95f1177f3c19","Type":"ContainerDied","Data":"0d4a2da23c3760129c3b6961a9690962a14f67453fc0f022cde58f0439ca8720"} Nov 30 07:32:41 crc kubenswrapper[4941]: I1130 07:32:41.911853 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sx66t" event={"ID":"f448deec-d51f-4448-8ff6-95f1177f3c19","Type":"ContainerStarted","Data":"5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77"} Nov 30 07:32:41 crc kubenswrapper[4941]: I1130 07:32:41.932493 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sx66t" podStartSLOduration=2.287363894 podStartE2EDuration="7.932472037s" podCreationTimestamp="2025-11-30 07:32:34 +0000 UTC" firstStartedPulling="2025-11-30 07:32:35.839643559 +0000 UTC m=+2776.607815168" lastFinishedPulling="2025-11-30 07:32:41.484751702 +0000 UTC m=+2782.252923311" observedRunningTime="2025-11-30 07:32:41.930867088 +0000 UTC m=+2782.699038727" watchObservedRunningTime="2025-11-30 07:32:41.932472037 +0000 UTC m=+2782.700643656" Nov 30 07:32:44 crc kubenswrapper[4941]: I1130 07:32:44.454546 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:44 crc kubenswrapper[4941]: I1130 07:32:44.455395 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:44 crc kubenswrapper[4941]: I1130 07:32:44.532412 4941 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:54 crc kubenswrapper[4941]: I1130 07:32:54.524411 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sx66t" Nov 30 07:32:54 crc kubenswrapper[4941]: I1130 07:32:54.743676 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sx66t"] Nov 30 07:32:54 crc kubenswrapper[4941]: I1130 07:32:54.800232 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5ntbl"] Nov 30 07:32:54 crc kubenswrapper[4941]: I1130 07:32:54.800577 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5ntbl" podUID="876e6120-1086-4d94-a98d-a757228000ae" containerName="registry-server" containerID="cri-o://63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6" gracePeriod=2 Nov 30 07:32:55 crc kubenswrapper[4941]: I1130 07:32:55.868943 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5ntbl" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.043101 4941 generic.go:334] "Generic (PLEG): container finished" podID="876e6120-1086-4d94-a98d-a757228000ae" containerID="63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6" exitCode=0 Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.043152 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5ntbl" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.043154 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5ntbl" event={"ID":"876e6120-1086-4d94-a98d-a757228000ae","Type":"ContainerDied","Data":"63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6"} Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.043199 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5ntbl" event={"ID":"876e6120-1086-4d94-a98d-a757228000ae","Type":"ContainerDied","Data":"1d69989f7bd29f3dc43f77b629220abe8db4e8f357174484db835268ca14d5c6"} Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.043219 4941 scope.go:117] "RemoveContainer" containerID="63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.061920 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/876e6120-1086-4d94-a98d-a757228000ae-catalog-content\") pod \"876e6120-1086-4d94-a98d-a757228000ae\" (UID: \"876e6120-1086-4d94-a98d-a757228000ae\") " Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.062031 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjr5c\" (UniqueName: \"kubernetes.io/projected/876e6120-1086-4d94-a98d-a757228000ae-kube-api-access-tjr5c\") pod \"876e6120-1086-4d94-a98d-a757228000ae\" (UID: \"876e6120-1086-4d94-a98d-a757228000ae\") " Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.062099 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/876e6120-1086-4d94-a98d-a757228000ae-utilities\") pod \"876e6120-1086-4d94-a98d-a757228000ae\" (UID: 
\"876e6120-1086-4d94-a98d-a757228000ae\") " Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.062782 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/876e6120-1086-4d94-a98d-a757228000ae-utilities" (OuterVolumeSpecName: "utilities") pod "876e6120-1086-4d94-a98d-a757228000ae" (UID: "876e6120-1086-4d94-a98d-a757228000ae"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.067214 4941 scope.go:117] "RemoveContainer" containerID="787b9570de87a4370ccfc8a72d466b443f6083ca341a993082c11fd76e857833" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.071611 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/876e6120-1086-4d94-a98d-a757228000ae-kube-api-access-tjr5c" (OuterVolumeSpecName: "kube-api-access-tjr5c") pod "876e6120-1086-4d94-a98d-a757228000ae" (UID: "876e6120-1086-4d94-a98d-a757228000ae"). InnerVolumeSpecName "kube-api-access-tjr5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.114383 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/876e6120-1086-4d94-a98d-a757228000ae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "876e6120-1086-4d94-a98d-a757228000ae" (UID: "876e6120-1086-4d94-a98d-a757228000ae"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.139662 4941 scope.go:117] "RemoveContainer" containerID="9f1d7a825ae6b0d032b4048917cd144bb4382572ffa53ff95db6facdfe35549d" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.164178 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/876e6120-1086-4d94-a98d-a757228000ae-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.164216 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/876e6120-1086-4d94-a98d-a757228000ae-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.164230 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjr5c\" (UniqueName: \"kubernetes.io/projected/876e6120-1086-4d94-a98d-a757228000ae-kube-api-access-tjr5c\") on node \"crc\" DevicePath \"\"" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.165308 4941 scope.go:117] "RemoveContainer" containerID="63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6" Nov 30 07:32:56 crc kubenswrapper[4941]: E1130 07:32:56.165799 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6\": container with ID starting with 63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6 not found: ID does not exist" containerID="63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.165830 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6"} err="failed to get container status \"63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6\": rpc error: code = NotFound 
desc = could not find container \"63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6\": container with ID starting with 63ac8d4871c9ab6addb58fb7541ac50d7b7ed3d9e1092e01115fe6c66337d8f6 not found: ID does not exist" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.165850 4941 scope.go:117] "RemoveContainer" containerID="787b9570de87a4370ccfc8a72d466b443f6083ca341a993082c11fd76e857833" Nov 30 07:32:56 crc kubenswrapper[4941]: E1130 07:32:56.166066 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"787b9570de87a4370ccfc8a72d466b443f6083ca341a993082c11fd76e857833\": container with ID starting with 787b9570de87a4370ccfc8a72d466b443f6083ca341a993082c11fd76e857833 not found: ID does not exist" containerID="787b9570de87a4370ccfc8a72d466b443f6083ca341a993082c11fd76e857833" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.166085 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"787b9570de87a4370ccfc8a72d466b443f6083ca341a993082c11fd76e857833"} err="failed to get container status \"787b9570de87a4370ccfc8a72d466b443f6083ca341a993082c11fd76e857833\": rpc error: code = NotFound desc = could not find container \"787b9570de87a4370ccfc8a72d466b443f6083ca341a993082c11fd76e857833\": container with ID starting with 787b9570de87a4370ccfc8a72d466b443f6083ca341a993082c11fd76e857833 not found: ID does not exist" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.166098 4941 scope.go:117] "RemoveContainer" containerID="9f1d7a825ae6b0d032b4048917cd144bb4382572ffa53ff95db6facdfe35549d" Nov 30 07:32:56 crc kubenswrapper[4941]: E1130 07:32:56.166336 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f1d7a825ae6b0d032b4048917cd144bb4382572ffa53ff95db6facdfe35549d\": container with ID starting with 9f1d7a825ae6b0d032b4048917cd144bb4382572ffa53ff95db6facdfe35549d not found: ID does not exist" containerID="9f1d7a825ae6b0d032b4048917cd144bb4382572ffa53ff95db6facdfe35549d" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.166359 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f1d7a825ae6b0d032b4048917cd144bb4382572ffa53ff95db6facdfe35549d"} err="failed to get container status \"9f1d7a825ae6b0d032b4048917cd144bb4382572ffa53ff95db6facdfe35549d\": rpc error: code = NotFound desc = could not find container \"9f1d7a825ae6b0d032b4048917cd144bb4382572ffa53ff95db6facdfe35549d\": container with ID starting with 9f1d7a825ae6b0d032b4048917cd144bb4382572ffa53ff95db6facdfe35549d not found: ID does not exist" Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.379438 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5ntbl"] Nov 30 07:32:56 crc kubenswrapper[4941]: I1130 07:32:56.386001 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5ntbl"] Nov 30 07:32:57 crc kubenswrapper[4941]: I1130 07:32:57.548490 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="876e6120-1086-4d94-a98d-a757228000ae" path="/var/lib/kubelet/pods/876e6120-1086-4d94-a98d-a757228000ae/volumes" Nov 30 07:34:02 crc kubenswrapper[4941]: I1130 07:34:02.978213 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:34:02 crc kubenswrapper[4941]: I1130 07:34:02.978991 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:34:32 crc kubenswrapper[4941]: I1130 07:34:32.978620 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:34:32 crc kubenswrapper[4941]: I1130 07:34:32.979909 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:35:02 crc kubenswrapper[4941]: I1130 07:35:02.978911 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:35:02 crc kubenswrapper[4941]: I1130 07:35:02.980197 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:35:02 crc kubenswrapper[4941]: I1130 07:35:02.980351 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 07:35:02 crc kubenswrapper[4941]: I1130 07:35:02.981924 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ee5bbfe76db79d6ca76ed246717ee694e1e3041d29a58f0aefbda086ca91d160"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 07:35:02 crc kubenswrapper[4941]: I1130 07:35:02.982072 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://ee5bbfe76db79d6ca76ed246717ee694e1e3041d29a58f0aefbda086ca91d160" gracePeriod=600 Nov 30 07:35:03 crc kubenswrapper[4941]: I1130 07:35:03.319605 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="ee5bbfe76db79d6ca76ed246717ee694e1e3041d29a58f0aefbda086ca91d160" exitCode=0 Nov 30 07:35:03 crc kubenswrapper[4941]: I1130 07:35:03.319717 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" 
event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"ee5bbfe76db79d6ca76ed246717ee694e1e3041d29a58f0aefbda086ca91d160"} Nov 30 07:35:03 crc kubenswrapper[4941]: I1130 07:35:03.320078 4941 scope.go:117] "RemoveContainer" containerID="cc6728a30c3ff2c5883cadfb9cd6d367166c92934cf9a6b90fb9ff2a05901778" Nov 30 07:35:04 crc kubenswrapper[4941]: I1130 07:35:04.337196 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"} Nov 30 07:37:32 crc kubenswrapper[4941]: I1130 07:37:32.979217 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:37:32 crc kubenswrapper[4941]: I1130 07:37:32.980209 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:38:02 crc kubenswrapper[4941]: I1130 07:38:02.979030 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:38:02 crc kubenswrapper[4941]: I1130 07:38:02.979530 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:38:32 crc kubenswrapper[4941]: I1130 07:38:32.980456 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:38:32 crc kubenswrapper[4941]: I1130 07:38:32.981436 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:38:32 crc kubenswrapper[4941]: I1130 07:38:32.982074 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 07:38:32 crc kubenswrapper[4941]: I1130 07:38:32.983121 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 
07:38:32 crc kubenswrapper[4941]: I1130 07:38:32.983271 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" gracePeriod=600 Nov 30 07:38:33 crc kubenswrapper[4941]: E1130 07:38:33.118187 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:38:33 crc kubenswrapper[4941]: I1130 07:38:33.480685 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" exitCode=0 Nov 30 07:38:33 crc kubenswrapper[4941]: I1130 07:38:33.480797 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"} Nov 30 07:38:33 crc kubenswrapper[4941]: I1130 07:38:33.480903 4941 scope.go:117] "RemoveContainer" containerID="ee5bbfe76db79d6ca76ed246717ee694e1e3041d29a58f0aefbda086ca91d160" Nov 30 07:38:33 crc kubenswrapper[4941]: I1130 07:38:33.482420 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:38:33 crc kubenswrapper[4941]: E1130 07:38:33.483206 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:38:44 crc kubenswrapper[4941]: I1130 07:38:44.521400 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:38:44 crc kubenswrapper[4941]: E1130 07:38:44.522627 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:38:58 crc kubenswrapper[4941]: I1130 07:38:58.521885 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:38:58 crc kubenswrapper[4941]: E1130 07:38:58.522934 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:39:10 crc kubenswrapper[4941]: I1130 07:39:10.522371 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:39:10 crc kubenswrapper[4941]: E1130 07:39:10.523265 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:39:22 crc kubenswrapper[4941]: I1130 07:39:22.522610 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:39:22 crc kubenswrapper[4941]: E1130 07:39:22.524140 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:39:33 crc kubenswrapper[4941]: I1130 07:39:33.522163 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:39:33 crc kubenswrapper[4941]: E1130 07:39:33.523589 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.117023 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5z7gg"] Nov 30 07:39:38 crc kubenswrapper[4941]: E1130 07:39:38.118406 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="876e6120-1086-4d94-a98d-a757228000ae" containerName="extract-content" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.118431 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="876e6120-1086-4d94-a98d-a757228000ae" containerName="extract-content" Nov 30 07:39:38 crc kubenswrapper[4941]: E1130 07:39:38.118454 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="876e6120-1086-4d94-a98d-a757228000ae" containerName="registry-server" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.118466 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="876e6120-1086-4d94-a98d-a757228000ae" containerName="registry-server" Nov 30 07:39:38 crc kubenswrapper[4941]: E1130 07:39:38.118493 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="876e6120-1086-4d94-a98d-a757228000ae" containerName="extract-utilities" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.118505 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="876e6120-1086-4d94-a98d-a757228000ae" containerName="extract-utilities" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 
07:39:38.118775 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="876e6120-1086-4d94-a98d-a757228000ae" containerName="registry-server" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.120743 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.145739 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5z7gg"] Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.255526 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-utilities\") pod \"certified-operators-5z7gg\" (UID: \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\") " pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.255950 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-catalog-content\") pod \"certified-operators-5z7gg\" (UID: \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\") " pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.256172 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgjjw\" (UniqueName: \"kubernetes.io/projected/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-kube-api-access-fgjjw\") pod \"certified-operators-5z7gg\" (UID: \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\") " pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.358209 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-utilities\") pod \"certified-operators-5z7gg\" (UID: \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\") " pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.358579 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-catalog-content\") pod \"certified-operators-5z7gg\" (UID: \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\") " pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.358747 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgjjw\" (UniqueName: \"kubernetes.io/projected/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-kube-api-access-fgjjw\") pod \"certified-operators-5z7gg\" (UID: \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\") " pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.358864 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-utilities\") pod \"certified-operators-5z7gg\" (UID: \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\") " pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.359111 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-catalog-content\") pod \"certified-operators-5z7gg\" (UID: \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\") " pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.392096 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgjjw\" (UniqueName: \"kubernetes.io/projected/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-kube-api-access-fgjjw\") pod \"certified-operators-5z7gg\" (UID: \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\") " pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.460174 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:38 crc kubenswrapper[4941]: I1130 07:39:38.985070 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5z7gg"] Nov 30 07:39:39 crc kubenswrapper[4941]: I1130 07:39:39.116269 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5z7gg" event={"ID":"b7998922-0c6b-4af9-9f91-2d26d91e8f1c","Type":"ContainerStarted","Data":"7930dc902bff752b5ac1ad1b9359ac39877d39f865a7d5a2f6174dfa3f43f1fa"} Nov 30 07:39:40 crc kubenswrapper[4941]: I1130 07:39:40.126666 4941 generic.go:334] "Generic (PLEG): container finished" podID="b7998922-0c6b-4af9-9f91-2d26d91e8f1c" containerID="cc203cac72ad5bd925709c330a6997bdae52fd2253033e94b0c7213c523b6a27" exitCode=0 Nov 30 07:39:40 crc kubenswrapper[4941]: I1130 07:39:40.126767 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5z7gg" event={"ID":"b7998922-0c6b-4af9-9f91-2d26d91e8f1c","Type":"ContainerDied","Data":"cc203cac72ad5bd925709c330a6997bdae52fd2253033e94b0c7213c523b6a27"} Nov 30 07:39:40 crc kubenswrapper[4941]: I1130 07:39:40.129962 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 07:39:41 crc kubenswrapper[4941]: I1130 07:39:41.143108 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5z7gg" event={"ID":"b7998922-0c6b-4af9-9f91-2d26d91e8f1c","Type":"ContainerStarted","Data":"9968ab453e17a101d3d404924e3be0a6c65543c8407632f1c72c1904ec3eae16"} Nov 30 07:39:42 crc kubenswrapper[4941]: I1130 07:39:42.164860 4941 generic.go:334] "Generic (PLEG): container finished" podID="b7998922-0c6b-4af9-9f91-2d26d91e8f1c" containerID="9968ab453e17a101d3d404924e3be0a6c65543c8407632f1c72c1904ec3eae16" exitCode=0 Nov 30 07:39:42 crc kubenswrapper[4941]: I1130 07:39:42.164946 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5z7gg" event={"ID":"b7998922-0c6b-4af9-9f91-2d26d91e8f1c","Type":"ContainerDied","Data":"9968ab453e17a101d3d404924e3be0a6c65543c8407632f1c72c1904ec3eae16"} Nov 30 07:39:43 crc kubenswrapper[4941]: I1130 07:39:43.179088 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5z7gg" event={"ID":"b7998922-0c6b-4af9-9f91-2d26d91e8f1c","Type":"ContainerStarted","Data":"b5b63bec357c493d63472706c174571df0671a4c73148b725a752c28c0c2da9b"} Nov 30 07:39:43 crc kubenswrapper[4941]: I1130 07:39:43.205755 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5z7gg" podStartSLOduration=2.699074401 podStartE2EDuration="5.205733956s" 
podCreationTimestamp="2025-11-30 07:39:38 +0000 UTC" firstStartedPulling="2025-11-30 07:39:40.129419712 +0000 UTC m=+3200.897591361" lastFinishedPulling="2025-11-30 07:39:42.636079277 +0000 UTC m=+3203.404250916" observedRunningTime="2025-11-30 07:39:43.202565528 +0000 UTC m=+3203.970737137" watchObservedRunningTime="2025-11-30 07:39:43.205733956 +0000 UTC m=+3203.973905565" Nov 30 07:39:48 crc kubenswrapper[4941]: I1130 07:39:48.460835 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:48 crc kubenswrapper[4941]: I1130 07:39:48.461573 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:48 crc kubenswrapper[4941]: I1130 07:39:48.522666 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:39:48 crc kubenswrapper[4941]: E1130 07:39:48.523207 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:39:48 crc kubenswrapper[4941]: I1130 07:39:48.531730 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:49 crc kubenswrapper[4941]: I1130 07:39:49.325207 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:51 crc kubenswrapper[4941]: I1130 07:39:51.796231 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5z7gg"] Nov 30 07:39:51 crc kubenswrapper[4941]: I1130 07:39:51.796956 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5z7gg" podUID="b7998922-0c6b-4af9-9f91-2d26d91e8f1c" containerName="registry-server" containerID="cri-o://b5b63bec357c493d63472706c174571df0671a4c73148b725a752c28c0c2da9b" gracePeriod=2 Nov 30 07:39:52 crc kubenswrapper[4941]: I1130 07:39:52.283970 4941 generic.go:334] "Generic (PLEG): container finished" podID="b7998922-0c6b-4af9-9f91-2d26d91e8f1c" containerID="b5b63bec357c493d63472706c174571df0671a4c73148b725a752c28c0c2da9b" exitCode=0 Nov 30 07:39:52 crc kubenswrapper[4941]: I1130 07:39:52.284025 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5z7gg" event={"ID":"b7998922-0c6b-4af9-9f91-2d26d91e8f1c","Type":"ContainerDied","Data":"b5b63bec357c493d63472706c174571df0671a4c73148b725a752c28c0c2da9b"} Nov 30 07:39:52 crc kubenswrapper[4941]: I1130 07:39:52.818036 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:52 crc kubenswrapper[4941]: I1130 07:39:52.936238 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-utilities\") pod \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\" (UID: \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\") " Nov 30 07:39:52 crc kubenswrapper[4941]: I1130 07:39:52.936390 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-catalog-content\") pod \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\" (UID: \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\") " Nov 30 07:39:52 crc kubenswrapper[4941]: I1130 07:39:52.936460 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgjjw\" (UniqueName: \"kubernetes.io/projected/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-kube-api-access-fgjjw\") pod \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\" (UID: \"b7998922-0c6b-4af9-9f91-2d26d91e8f1c\") " Nov 30 07:39:52 crc kubenswrapper[4941]: I1130 07:39:52.937200 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-utilities" (OuterVolumeSpecName: "utilities") pod "b7998922-0c6b-4af9-9f91-2d26d91e8f1c" (UID: "b7998922-0c6b-4af9-9f91-2d26d91e8f1c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:39:52 crc kubenswrapper[4941]: I1130 07:39:52.969543 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-kube-api-access-fgjjw" (OuterVolumeSpecName: "kube-api-access-fgjjw") pod "b7998922-0c6b-4af9-9f91-2d26d91e8f1c" (UID: "b7998922-0c6b-4af9-9f91-2d26d91e8f1c"). InnerVolumeSpecName "kube-api-access-fgjjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:39:52 crc kubenswrapper[4941]: I1130 07:39:52.991288 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b7998922-0c6b-4af9-9f91-2d26d91e8f1c" (UID: "b7998922-0c6b-4af9-9f91-2d26d91e8f1c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:39:53 crc kubenswrapper[4941]: I1130 07:39:53.038125 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:39:53 crc kubenswrapper[4941]: I1130 07:39:53.038168 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:39:53 crc kubenswrapper[4941]: I1130 07:39:53.038179 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgjjw\" (UniqueName: \"kubernetes.io/projected/b7998922-0c6b-4af9-9f91-2d26d91e8f1c-kube-api-access-fgjjw\") on node \"crc\" DevicePath \"\"" Nov 30 07:39:53 crc kubenswrapper[4941]: I1130 07:39:53.292093 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5z7gg" event={"ID":"b7998922-0c6b-4af9-9f91-2d26d91e8f1c","Type":"ContainerDied","Data":"7930dc902bff752b5ac1ad1b9359ac39877d39f865a7d5a2f6174dfa3f43f1fa"} Nov 30 07:39:53 crc kubenswrapper[4941]: I1130 07:39:53.292148 4941 scope.go:117] "RemoveContainer" containerID="b5b63bec357c493d63472706c174571df0671a4c73148b725a752c28c0c2da9b" Nov 30 07:39:53 crc kubenswrapper[4941]: I1130 07:39:53.292173 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5z7gg" Nov 30 07:39:53 crc kubenswrapper[4941]: I1130 07:39:53.310517 4941 scope.go:117] "RemoveContainer" containerID="9968ab453e17a101d3d404924e3be0a6c65543c8407632f1c72c1904ec3eae16" Nov 30 07:39:53 crc kubenswrapper[4941]: I1130 07:39:53.339548 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5z7gg"] Nov 30 07:39:53 crc kubenswrapper[4941]: I1130 07:39:53.343260 4941 scope.go:117] "RemoveContainer" containerID="cc203cac72ad5bd925709c330a6997bdae52fd2253033e94b0c7213c523b6a27" Nov 30 07:39:53 crc kubenswrapper[4941]: I1130 07:39:53.347968 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5z7gg"] Nov 30 07:39:53 crc kubenswrapper[4941]: I1130 07:39:53.530270 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7998922-0c6b-4af9-9f91-2d26d91e8f1c" path="/var/lib/kubelet/pods/b7998922-0c6b-4af9-9f91-2d26d91e8f1c/volumes" Nov 30 07:40:01 crc kubenswrapper[4941]: I1130 07:40:01.522650 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:40:01 crc kubenswrapper[4941]: E1130 07:40:01.523542 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:40:13 crc kubenswrapper[4941]: I1130 07:40:13.522647 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:40:13 crc kubenswrapper[4941]: E1130 07:40:13.523691 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:40:24 crc kubenswrapper[4941]: I1130 07:40:24.521797 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:40:24 crc kubenswrapper[4941]: E1130 07:40:24.524396 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:40:35 crc kubenswrapper[4941]: I1130 07:40:35.522127 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:40:35 crc kubenswrapper[4941]: E1130 07:40:35.522845 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:40:49 crc kubenswrapper[4941]: I1130 07:40:49.530182 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:40:49 crc kubenswrapper[4941]: E1130 07:40:49.531287 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:41:03 crc kubenswrapper[4941]: I1130 07:41:03.521927 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:41:03 crc kubenswrapper[4941]: E1130 07:41:03.522595 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:41:14 crc kubenswrapper[4941]: I1130 07:41:14.522286 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf" Nov 30 07:41:14 crc kubenswrapper[4941]: E1130 07:41:14.523138 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:41:27 crc kubenswrapper[4941]: I1130 07:41:27.522121 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"
Nov 30 07:41:27 crc kubenswrapper[4941]: E1130 07:41:27.523918 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:41:42 crc kubenswrapper[4941]: I1130 07:41:42.522535 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"
Nov 30 07:41:42 crc kubenswrapper[4941]: E1130 07:41:42.523646 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:41:54 crc kubenswrapper[4941]: I1130 07:41:54.522528 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"
Nov 30 07:41:54 crc kubenswrapper[4941]: E1130 07:41:54.523969 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:42:05 crc kubenswrapper[4941]: I1130 07:42:05.522274 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"
Nov 30 07:42:05 crc kubenswrapper[4941]: E1130 07:42:05.523170 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:42:16 crc kubenswrapper[4941]: I1130 07:42:16.521822 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"
Nov 30 07:42:16 crc kubenswrapper[4941]: E1130 07:42:16.522516 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:42:30 crc kubenswrapper[4941]: I1130 07:42:30.522936 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"
Nov 30 07:42:30 crc kubenswrapper[4941]: E1130 07:42:30.524032 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:42:44 crc kubenswrapper[4941]: I1130 07:42:44.522260 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"
Nov 30 07:42:44 crc kubenswrapper[4941]: E1130 07:42:44.523740 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:42:58 crc kubenswrapper[4941]: I1130 07:42:58.523270 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"
Nov 30 07:42:58 crc kubenswrapper[4941]: E1130 07:42:58.524503 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:43:12 crc kubenswrapper[4941]: I1130 07:43:12.522790 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"
Nov 30 07:43:12 crc kubenswrapper[4941]: E1130 07:43:12.524086 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:43:27 crc kubenswrapper[4941]: I1130 07:43:27.522263 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"
Nov 30 07:43:27 crc kubenswrapper[4941]: E1130 07:43:27.523180 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 07:43:39 crc kubenswrapper[4941]: I1130 07:43:39.527070 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"
Nov 30 07:43:39 crc kubenswrapper[4941]: I1130 07:43:39.710286 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"2f609ca886e9c809239760e3b3d6fb6ce6ff323e36c67161a331ea3a66600915"}
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.253752 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xwmtt"]
Nov 30 07:44:04 crc kubenswrapper[4941]: E1130 07:44:04.254807 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7998922-0c6b-4af9-9f91-2d26d91e8f1c" containerName="registry-server"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.254821 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7998922-0c6b-4af9-9f91-2d26d91e8f1c" containerName="registry-server"
Nov 30 07:44:04 crc kubenswrapper[4941]: E1130 07:44:04.254845 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7998922-0c6b-4af9-9f91-2d26d91e8f1c" containerName="extract-content"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.254851 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7998922-0c6b-4af9-9f91-2d26d91e8f1c" containerName="extract-content"
Nov 30 07:44:04 crc kubenswrapper[4941]: E1130 07:44:04.254876 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7998922-0c6b-4af9-9f91-2d26d91e8f1c" containerName="extract-utilities"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.254883 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7998922-0c6b-4af9-9f91-2d26d91e8f1c" containerName="extract-utilities"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.255023 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7998922-0c6b-4af9-9f91-2d26d91e8f1c" containerName="registry-server"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.256232 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.275017 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xwmtt"]
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.322252 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d9mj\" (UniqueName: \"kubernetes.io/projected/33668f65-cc45-4661-ab67-3423342e0aa8-kube-api-access-8d9mj\") pod \"community-operators-xwmtt\" (UID: \"33668f65-cc45-4661-ab67-3423342e0aa8\") " pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.322928 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33668f65-cc45-4661-ab67-3423342e0aa8-utilities\") pod \"community-operators-xwmtt\" (UID: \"33668f65-cc45-4661-ab67-3423342e0aa8\") " pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.323205 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33668f65-cc45-4661-ab67-3423342e0aa8-catalog-content\") pod \"community-operators-xwmtt\" (UID: \"33668f65-cc45-4661-ab67-3423342e0aa8\") " pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.425593 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d9mj\" (UniqueName: \"kubernetes.io/projected/33668f65-cc45-4661-ab67-3423342e0aa8-kube-api-access-8d9mj\") pod \"community-operators-xwmtt\" (UID: \"33668f65-cc45-4661-ab67-3423342e0aa8\") " pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.425738 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33668f65-cc45-4661-ab67-3423342e0aa8-utilities\") pod \"community-operators-xwmtt\" (UID: \"33668f65-cc45-4661-ab67-3423342e0aa8\") " pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.425786 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33668f65-cc45-4661-ab67-3423342e0aa8-catalog-content\") pod \"community-operators-xwmtt\" (UID: \"33668f65-cc45-4661-ab67-3423342e0aa8\") " pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.426758 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33668f65-cc45-4661-ab67-3423342e0aa8-utilities\") pod \"community-operators-xwmtt\" (UID: \"33668f65-cc45-4661-ab67-3423342e0aa8\") " pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.426851 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33668f65-cc45-4661-ab67-3423342e0aa8-catalog-content\") pod \"community-operators-xwmtt\" (UID: \"33668f65-cc45-4661-ab67-3423342e0aa8\") " pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.446798 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4bmgf"]
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.449004 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.459991 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bmgf"]
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.482775 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d9mj\" (UniqueName: \"kubernetes.io/projected/33668f65-cc45-4661-ab67-3423342e0aa8-kube-api-access-8d9mj\") pod \"community-operators-xwmtt\" (UID: \"33668f65-cc45-4661-ab67-3423342e0aa8\") " pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.528282 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrxcs\" (UniqueName: \"kubernetes.io/projected/71872f12-399d-421c-824e-4e2c1d48856d-kube-api-access-qrxcs\") pod \"redhat-marketplace-4bmgf\" (UID: \"71872f12-399d-421c-824e-4e2c1d48856d\") " pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.528765 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71872f12-399d-421c-824e-4e2c1d48856d-utilities\") pod \"redhat-marketplace-4bmgf\" (UID: \"71872f12-399d-421c-824e-4e2c1d48856d\") " pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.528868 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71872f12-399d-421c-824e-4e2c1d48856d-catalog-content\") pod \"redhat-marketplace-4bmgf\" (UID: \"71872f12-399d-421c-824e-4e2c1d48856d\") " pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.588359 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.630062 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71872f12-399d-421c-824e-4e2c1d48856d-utilities\") pod \"redhat-marketplace-4bmgf\" (UID: \"71872f12-399d-421c-824e-4e2c1d48856d\") " pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.630119 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71872f12-399d-421c-824e-4e2c1d48856d-catalog-content\") pod \"redhat-marketplace-4bmgf\" (UID: \"71872f12-399d-421c-824e-4e2c1d48856d\") " pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.630188 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrxcs\" (UniqueName: \"kubernetes.io/projected/71872f12-399d-421c-824e-4e2c1d48856d-kube-api-access-qrxcs\") pod \"redhat-marketplace-4bmgf\" (UID: \"71872f12-399d-421c-824e-4e2c1d48856d\") " pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.630668 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71872f12-399d-421c-824e-4e2c1d48856d-utilities\") pod \"redhat-marketplace-4bmgf\" (UID: \"71872f12-399d-421c-824e-4e2c1d48856d\") " pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.632817 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71872f12-399d-421c-824e-4e2c1d48856d-catalog-content\") pod \"redhat-marketplace-4bmgf\" (UID: \"71872f12-399d-421c-824e-4e2c1d48856d\") " pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.651379 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrxcs\" (UniqueName: \"kubernetes.io/projected/71872f12-399d-421c-824e-4e2c1d48856d-kube-api-access-qrxcs\") pod \"redhat-marketplace-4bmgf\" (UID: \"71872f12-399d-421c-824e-4e2c1d48856d\") " pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.813064 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:04 crc kubenswrapper[4941]: I1130 07:44:04.993765 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xwmtt"]
Nov 30 07:44:05 crc kubenswrapper[4941]: I1130 07:44:05.370911 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bmgf"]
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.017368 4941 generic.go:334] "Generic (PLEG): container finished" podID="71872f12-399d-421c-824e-4e2c1d48856d" containerID="f3f857e0d872389549fa0be225f96debdc7bd95b3e99b3381a722e3151155eb7" exitCode=0
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.017749 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bmgf" event={"ID":"71872f12-399d-421c-824e-4e2c1d48856d","Type":"ContainerDied","Data":"f3f857e0d872389549fa0be225f96debdc7bd95b3e99b3381a722e3151155eb7"}
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.017825 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bmgf" event={"ID":"71872f12-399d-421c-824e-4e2c1d48856d","Type":"ContainerStarted","Data":"db450309305417d1036334a57f689db3a0f85913ab3ce1eb0323a5440f03a6c4"}
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.020805 4941 generic.go:334] "Generic (PLEG): container finished" podID="33668f65-cc45-4661-ab67-3423342e0aa8" containerID="3abdbe7cc50f827a8c52fc57fa1e38227d8c478b419ea9bb554370dccdb2481b" exitCode=0
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.020877 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwmtt" event={"ID":"33668f65-cc45-4661-ab67-3423342e0aa8","Type":"ContainerDied","Data":"3abdbe7cc50f827a8c52fc57fa1e38227d8c478b419ea9bb554370dccdb2481b"}
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.020932 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwmtt" event={"ID":"33668f65-cc45-4661-ab67-3423342e0aa8","Type":"ContainerStarted","Data":"4828d8b2f6017e4be4b285fa47f565b2783d90199eca49a90a8e0c5555150d81"}
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.649984 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pdwwt"]
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.654663 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.665540 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pdwwt"]
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.780651 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24806960-bab2-45aa-990a-014aafb206a7-utilities\") pod \"redhat-operators-pdwwt\" (UID: \"24806960-bab2-45aa-990a-014aafb206a7\") " pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.780951 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6kh2\" (UniqueName: \"kubernetes.io/projected/24806960-bab2-45aa-990a-014aafb206a7-kube-api-access-f6kh2\") pod \"redhat-operators-pdwwt\" (UID: \"24806960-bab2-45aa-990a-014aafb206a7\") " pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.781091 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24806960-bab2-45aa-990a-014aafb206a7-catalog-content\") pod \"redhat-operators-pdwwt\" (UID: \"24806960-bab2-45aa-990a-014aafb206a7\") " pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.882879 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6kh2\" (UniqueName: \"kubernetes.io/projected/24806960-bab2-45aa-990a-014aafb206a7-kube-api-access-f6kh2\") pod \"redhat-operators-pdwwt\" (UID: \"24806960-bab2-45aa-990a-014aafb206a7\") " pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.882961 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24806960-bab2-45aa-990a-014aafb206a7-catalog-content\") pod \"redhat-operators-pdwwt\" (UID: \"24806960-bab2-45aa-990a-014aafb206a7\") " pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.883054 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24806960-bab2-45aa-990a-014aafb206a7-utilities\") pod \"redhat-operators-pdwwt\" (UID: \"24806960-bab2-45aa-990a-014aafb206a7\") " pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.883835 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24806960-bab2-45aa-990a-014aafb206a7-utilities\") pod \"redhat-operators-pdwwt\" (UID: \"24806960-bab2-45aa-990a-014aafb206a7\") " pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.884082 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24806960-bab2-45aa-990a-014aafb206a7-catalog-content\") pod \"redhat-operators-pdwwt\" (UID: \"24806960-bab2-45aa-990a-014aafb206a7\") " pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.906942 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6kh2\" (UniqueName: \"kubernetes.io/projected/24806960-bab2-45aa-990a-014aafb206a7-kube-api-access-f6kh2\") pod \"redhat-operators-pdwwt\" (UID: \"24806960-bab2-45aa-990a-014aafb206a7\") " pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:06 crc kubenswrapper[4941]: I1130 07:44:06.975827 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:07 crc kubenswrapper[4941]: I1130 07:44:07.059790 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwmtt" event={"ID":"33668f65-cc45-4661-ab67-3423342e0aa8","Type":"ContainerStarted","Data":"c36687c6711a9fcd30ac12e98fcee4aaef3f949ccfa4b152884260d5b99582f8"}
Nov 30 07:44:07 crc kubenswrapper[4941]: I1130 07:44:07.070370 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bmgf" event={"ID":"71872f12-399d-421c-824e-4e2c1d48856d","Type":"ContainerStarted","Data":"303825205be27ea461e84514403a0f6e811a84f897be26f325708cbf28a5aae6"}
Nov 30 07:44:07 crc kubenswrapper[4941]: I1130 07:44:07.277041 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pdwwt"]
Nov 30 07:44:08 crc kubenswrapper[4941]: I1130 07:44:08.079875 4941 generic.go:334] "Generic (PLEG): container finished" podID="71872f12-399d-421c-824e-4e2c1d48856d" containerID="303825205be27ea461e84514403a0f6e811a84f897be26f325708cbf28a5aae6" exitCode=0
Nov 30 07:44:08 crc kubenswrapper[4941]: I1130 07:44:08.079949 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bmgf" event={"ID":"71872f12-399d-421c-824e-4e2c1d48856d","Type":"ContainerDied","Data":"303825205be27ea461e84514403a0f6e811a84f897be26f325708cbf28a5aae6"}
Nov 30 07:44:08 crc kubenswrapper[4941]: I1130 07:44:08.082104 4941 generic.go:334] "Generic (PLEG): container finished" podID="33668f65-cc45-4661-ab67-3423342e0aa8" containerID="c36687c6711a9fcd30ac12e98fcee4aaef3f949ccfa4b152884260d5b99582f8" exitCode=0
Nov 30 07:44:08 crc kubenswrapper[4941]: I1130 07:44:08.082235 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwmtt" event={"ID":"33668f65-cc45-4661-ab67-3423342e0aa8","Type":"ContainerDied","Data":"c36687c6711a9fcd30ac12e98fcee4aaef3f949ccfa4b152884260d5b99582f8"}
Nov 30 07:44:08 crc kubenswrapper[4941]: I1130 07:44:08.084805 4941 generic.go:334] "Generic (PLEG): container finished" podID="24806960-bab2-45aa-990a-014aafb206a7" containerID="acc5050fc3498689b3be9e58b8fef2055e5d37913f9ccfb9148cca8c8ae4b0e0" exitCode=0
Nov 30 07:44:08 crc kubenswrapper[4941]: I1130 07:44:08.084841 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pdwwt" event={"ID":"24806960-bab2-45aa-990a-014aafb206a7","Type":"ContainerDied","Data":"acc5050fc3498689b3be9e58b8fef2055e5d37913f9ccfb9148cca8c8ae4b0e0"}
Nov 30 07:44:08 crc kubenswrapper[4941]: I1130 07:44:08.084911 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pdwwt" event={"ID":"24806960-bab2-45aa-990a-014aafb206a7","Type":"ContainerStarted","Data":"4498d2032004ae71e5430ca24fc227c0ef0d57ed4d3f2c9171e25f5c47b4ae55"}
Nov 30 07:44:09 crc kubenswrapper[4941]: I1130 07:44:09.096094 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bmgf" event={"ID":"71872f12-399d-421c-824e-4e2c1d48856d","Type":"ContainerStarted","Data":"875fc6f539882b57c2dae6489109a7c12ed06d7c87a51330e176b8a930f3fc3f"}
Nov 30 07:44:09 crc kubenswrapper[4941]: I1130 07:44:09.099431 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwmtt" event={"ID":"33668f65-cc45-4661-ab67-3423342e0aa8","Type":"ContainerStarted","Data":"989023db66c71214796069c2868edff9c75813660c64ff815f08871cb2d263b0"}
Nov 30 07:44:09 crc kubenswrapper[4941]: I1130 07:44:09.102301 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pdwwt" event={"ID":"24806960-bab2-45aa-990a-014aafb206a7","Type":"ContainerStarted","Data":"ecec607c69bb19e06f8fe8b12bee33f845b20bb9d1e354d5efd46406398752b3"}
Nov 30 07:44:09 crc kubenswrapper[4941]: I1130 07:44:09.125250 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4bmgf" podStartSLOduration=2.6481307750000003 podStartE2EDuration="5.125222116s" podCreationTimestamp="2025-11-30 07:44:04 +0000 UTC" firstStartedPulling="2025-11-30 07:44:06.021392401 +0000 UTC m=+3466.789564040" lastFinishedPulling="2025-11-30 07:44:08.498483772 +0000 UTC m=+3469.266655381" observedRunningTime="2025-11-30 07:44:09.118809608 +0000 UTC m=+3469.886981227" watchObservedRunningTime="2025-11-30 07:44:09.125222116 +0000 UTC m=+3469.893393725"
Nov 30 07:44:09 crc kubenswrapper[4941]: I1130 07:44:09.175809 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xwmtt" podStartSLOduration=2.693000231 podStartE2EDuration="5.175775578s" podCreationTimestamp="2025-11-30 07:44:04 +0000 UTC" firstStartedPulling="2025-11-30 07:44:06.026603422 +0000 UTC m=+3466.794775071" lastFinishedPulling="2025-11-30 07:44:08.509378809 +0000 UTC m=+3469.277550418" observedRunningTime="2025-11-30 07:44:09.165728938 +0000 UTC m=+3469.933900567" watchObservedRunningTime="2025-11-30 07:44:09.175775578 +0000 UTC m=+3469.943947197"
Nov 30 07:44:10 crc kubenswrapper[4941]: I1130 07:44:10.114566 4941 generic.go:334] "Generic (PLEG): container finished" podID="24806960-bab2-45aa-990a-014aafb206a7" containerID="ecec607c69bb19e06f8fe8b12bee33f845b20bb9d1e354d5efd46406398752b3" exitCode=0
Nov 30 07:44:10 crc kubenswrapper[4941]: I1130 07:44:10.114720 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pdwwt" event={"ID":"24806960-bab2-45aa-990a-014aafb206a7","Type":"ContainerDied","Data":"ecec607c69bb19e06f8fe8b12bee33f845b20bb9d1e354d5efd46406398752b3"}
Nov 30 07:44:11 crc kubenswrapper[4941]: I1130 07:44:11.131383 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pdwwt" event={"ID":"24806960-bab2-45aa-990a-014aafb206a7","Type":"ContainerStarted","Data":"a5f714014235c49b0ffc1b840be0bf8195c7bed919636fde9f3afbbce80b2c39"}
Nov 30 07:44:14 crc kubenswrapper[4941]: I1130 07:44:14.589146 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:14 crc kubenswrapper[4941]: I1130 07:44:14.589583 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:14 crc kubenswrapper[4941]: I1130 07:44:14.652774 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:14 crc kubenswrapper[4941]: I1130 07:44:14.685522 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pdwwt" podStartSLOduration=6.186353851 podStartE2EDuration="8.685499864s" podCreationTimestamp="2025-11-30 07:44:06 +0000 UTC" firstStartedPulling="2025-11-30 07:44:08.087114333 +0000 UTC m=+3468.855285942" lastFinishedPulling="2025-11-30 07:44:10.586260336 +0000 UTC m=+3471.354431955" observedRunningTime="2025-11-30 07:44:11.155969647 +0000 UTC m=+3471.924141286" watchObservedRunningTime="2025-11-30 07:44:14.685499864 +0000 UTC m=+3475.453671473"
Nov 30 07:44:14 crc kubenswrapper[4941]: I1130 07:44:14.815057 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:14 crc kubenswrapper[4941]: I1130 07:44:14.815465 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:14 crc kubenswrapper[4941]: I1130 07:44:14.885460 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:15 crc kubenswrapper[4941]: I1130 07:44:15.221336 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:15 crc kubenswrapper[4941]: I1130 07:44:15.261224 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:16 crc kubenswrapper[4941]: I1130 07:44:16.227612 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xwmtt"]
Nov 30 07:44:16 crc kubenswrapper[4941]: I1130 07:44:16.976307 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:16 crc kubenswrapper[4941]: I1130 07:44:16.976448 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:17 crc kubenswrapper[4941]: I1130 07:44:17.029615 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:17 crc kubenswrapper[4941]: I1130 07:44:17.197911 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xwmtt" podUID="33668f65-cc45-4661-ab67-3423342e0aa8" containerName="registry-server" containerID="cri-o://989023db66c71214796069c2868edff9c75813660c64ff815f08871cb2d263b0" gracePeriod=2
Nov 30 07:44:17 crc kubenswrapper[4941]: I1130 07:44:17.228719 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bmgf"]
Nov 30 07:44:17 crc kubenswrapper[4941]: I1130 07:44:17.229242 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4bmgf" podUID="71872f12-399d-421c-824e-4e2c1d48856d" containerName="registry-server" containerID="cri-o://875fc6f539882b57c2dae6489109a7c12ed06d7c87a51330e176b8a930f3fc3f" gracePeriod=2
Nov 30 07:44:17 crc kubenswrapper[4941]: I1130 07:44:17.277642 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.207480 4941 generic.go:334] "Generic (PLEG): container finished" podID="71872f12-399d-421c-824e-4e2c1d48856d" containerID="875fc6f539882b57c2dae6489109a7c12ed06d7c87a51330e176b8a930f3fc3f" exitCode=0
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.207542 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bmgf" event={"ID":"71872f12-399d-421c-824e-4e2c1d48856d","Type":"ContainerDied","Data":"875fc6f539882b57c2dae6489109a7c12ed06d7c87a51330e176b8a930f3fc3f"}
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.210772 4941 generic.go:334] "Generic (PLEG): container finished" podID="33668f65-cc45-4661-ab67-3423342e0aa8" containerID="989023db66c71214796069c2868edff9c75813660c64ff815f08871cb2d263b0" exitCode=0
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.210832 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwmtt" event={"ID":"33668f65-cc45-4661-ab67-3423342e0aa8","Type":"ContainerDied","Data":"989023db66c71214796069c2868edff9c75813660c64ff815f08871cb2d263b0"}
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.883296 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.887978 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.908847 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71872f12-399d-421c-824e-4e2c1d48856d-catalog-content\") pod \"71872f12-399d-421c-824e-4e2c1d48856d\" (UID: \"71872f12-399d-421c-824e-4e2c1d48856d\") "
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.909253 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71872f12-399d-421c-824e-4e2c1d48856d-utilities\") pod \"71872f12-399d-421c-824e-4e2c1d48856d\" (UID: \"71872f12-399d-421c-824e-4e2c1d48856d\") "
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.909360 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33668f65-cc45-4661-ab67-3423342e0aa8-catalog-content\") pod \"33668f65-cc45-4661-ab67-3423342e0aa8\" (UID: \"33668f65-cc45-4661-ab67-3423342e0aa8\") "
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.909399 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8d9mj\" (UniqueName: \"kubernetes.io/projected/33668f65-cc45-4661-ab67-3423342e0aa8-kube-api-access-8d9mj\") pod \"33668f65-cc45-4661-ab67-3423342e0aa8\" (UID: \"33668f65-cc45-4661-ab67-3423342e0aa8\") "
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.910008 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33668f65-cc45-4661-ab67-3423342e0aa8-utilities\") pod \"33668f65-cc45-4661-ab67-3423342e0aa8\" (UID: \"33668f65-cc45-4661-ab67-3423342e0aa8\") "
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.910089 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrxcs\" (UniqueName: \"kubernetes.io/projected/71872f12-399d-421c-824e-4e2c1d48856d-kube-api-access-qrxcs\") pod \"71872f12-399d-421c-824e-4e2c1d48856d\" (UID: \"71872f12-399d-421c-824e-4e2c1d48856d\") "
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.911121 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33668f65-cc45-4661-ab67-3423342e0aa8-utilities" (OuterVolumeSpecName: "utilities") pod "33668f65-cc45-4661-ab67-3423342e0aa8" (UID: "33668f65-cc45-4661-ab67-3423342e0aa8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.913669 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71872f12-399d-421c-824e-4e2c1d48856d-utilities" (OuterVolumeSpecName: "utilities") pod "71872f12-399d-421c-824e-4e2c1d48856d" (UID: "71872f12-399d-421c-824e-4e2c1d48856d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.920536 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33668f65-cc45-4661-ab67-3423342e0aa8-kube-api-access-8d9mj" (OuterVolumeSpecName: "kube-api-access-8d9mj") pod "33668f65-cc45-4661-ab67-3423342e0aa8" (UID: "33668f65-cc45-4661-ab67-3423342e0aa8"). InnerVolumeSpecName "kube-api-access-8d9mj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.921041 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71872f12-399d-421c-824e-4e2c1d48856d-kube-api-access-qrxcs" (OuterVolumeSpecName: "kube-api-access-qrxcs") pod "71872f12-399d-421c-824e-4e2c1d48856d" (UID: "71872f12-399d-421c-824e-4e2c1d48856d"). InnerVolumeSpecName "kube-api-access-qrxcs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.953275 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71872f12-399d-421c-824e-4e2c1d48856d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "71872f12-399d-421c-824e-4e2c1d48856d" (UID: "71872f12-399d-421c-824e-4e2c1d48856d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:44:18 crc kubenswrapper[4941]: I1130 07:44:18.987054 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33668f65-cc45-4661-ab67-3423342e0aa8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "33668f65-cc45-4661-ab67-3423342e0aa8" (UID: "33668f65-cc45-4661-ab67-3423342e0aa8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.012445 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71872f12-399d-421c-824e-4e2c1d48856d-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.012489 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33668f65-cc45-4661-ab67-3423342e0aa8-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.012511 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8d9mj\" (UniqueName: \"kubernetes.io/projected/33668f65-cc45-4661-ab67-3423342e0aa8-kube-api-access-8d9mj\") on node \"crc\" DevicePath \"\""
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.012530 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33668f65-cc45-4661-ab67-3423342e0aa8-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.012552 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrxcs\" (UniqueName: \"kubernetes.io/projected/71872f12-399d-421c-824e-4e2c1d48856d-kube-api-access-qrxcs\") on node \"crc\" DevicePath \"\""
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.012575 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71872f12-399d-421c-824e-4e2c1d48856d-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.231072 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xwmtt" event={"ID":"33668f65-cc45-4661-ab67-3423342e0aa8","Type":"ContainerDied","Data":"4828d8b2f6017e4be4b285fa47f565b2783d90199eca49a90a8e0c5555150d81"}
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.231145 4941 scope.go:117] "RemoveContainer" containerID="989023db66c71214796069c2868edff9c75813660c64ff815f08871cb2d263b0"
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.231136 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xwmtt"
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.236600 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bmgf" event={"ID":"71872f12-399d-421c-824e-4e2c1d48856d","Type":"ContainerDied","Data":"db450309305417d1036334a57f689db3a0f85913ab3ce1eb0323a5440f03a6c4"}
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.236721 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bmgf"
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.286037 4941 scope.go:117] "RemoveContainer" containerID="c36687c6711a9fcd30ac12e98fcee4aaef3f949ccfa4b152884260d5b99582f8"
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.291192 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xwmtt"]
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.308414 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xwmtt"]
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.321796 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bmgf"]
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.330433 4941 scope.go:117] "RemoveContainer" containerID="3abdbe7cc50f827a8c52fc57fa1e38227d8c478b419ea9bb554370dccdb2481b"
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.334047 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bmgf"]
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.354868 4941 scope.go:117] "RemoveContainer" containerID="875fc6f539882b57c2dae6489109a7c12ed06d7c87a51330e176b8a930f3fc3f"
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.376961 4941 scope.go:117] "RemoveContainer" containerID="303825205be27ea461e84514403a0f6e811a84f897be26f325708cbf28a5aae6"
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.404774 4941 scope.go:117] "RemoveContainer" containerID="f3f857e0d872389549fa0be225f96debdc7bd95b3e99b3381a722e3151155eb7"
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.538731 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33668f65-cc45-4661-ab67-3423342e0aa8" path="/var/lib/kubelet/pods/33668f65-cc45-4661-ab67-3423342e0aa8/volumes"
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.539668 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71872f12-399d-421c-824e-4e2c1d48856d" path="/var/lib/kubelet/pods/71872f12-399d-421c-824e-4e2c1d48856d/volumes"
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.628184 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pdwwt"]
Nov 30 07:44:19 crc kubenswrapper[4941]: I1130 07:44:19.628467 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pdwwt" podUID="24806960-bab2-45aa-990a-014aafb206a7" containerName="registry-server" containerID="cri-o://a5f714014235c49b0ffc1b840be0bf8195c7bed919636fde9f3afbbce80b2c39" gracePeriod=2
Nov 30 07:44:20 crc kubenswrapper[4941]: I1130 07:44:20.252796 4941 generic.go:334] "Generic (PLEG): container finished" podID="24806960-bab2-45aa-990a-014aafb206a7" containerID="a5f714014235c49b0ffc1b840be0bf8195c7bed919636fde9f3afbbce80b2c39" exitCode=0
Nov 30 07:44:20 crc kubenswrapper[4941]: I1130 07:44:20.253033 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pdwwt" event={"ID":"24806960-bab2-45aa-990a-014aafb206a7","Type":"ContainerDied","Data":"a5f714014235c49b0ffc1b840be0bf8195c7bed919636fde9f3afbbce80b2c39"}
Nov 30 07:44:20 crc kubenswrapper[4941]: I1130 07:44:20.562405 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:20 crc kubenswrapper[4941]: I1130 07:44:20.644675 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24806960-bab2-45aa-990a-014aafb206a7-utilities\") pod \"24806960-bab2-45aa-990a-014aafb206a7\" (UID: \"24806960-bab2-45aa-990a-014aafb206a7\") "
Nov 30 07:44:20 crc kubenswrapper[4941]: I1130 07:44:20.644762 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24806960-bab2-45aa-990a-014aafb206a7-catalog-content\") pod \"24806960-bab2-45aa-990a-014aafb206a7\" (UID: \"24806960-bab2-45aa-990a-014aafb206a7\") "
Nov 30 07:44:20 crc kubenswrapper[4941]: I1130 07:44:20.644809 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6kh2\" (UniqueName: \"kubernetes.io/projected/24806960-bab2-45aa-990a-014aafb206a7-kube-api-access-f6kh2\") pod \"24806960-bab2-45aa-990a-014aafb206a7\" (UID: \"24806960-bab2-45aa-990a-014aafb206a7\") "
Nov 30 07:44:20 crc kubenswrapper[4941]: I1130 07:44:20.646785 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24806960-bab2-45aa-990a-014aafb206a7-utilities" (OuterVolumeSpecName: "utilities") pod "24806960-bab2-45aa-990a-014aafb206a7" (UID: "24806960-bab2-45aa-990a-014aafb206a7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:44:20 crc kubenswrapper[4941]: I1130 07:44:20.652639 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24806960-bab2-45aa-990a-014aafb206a7-kube-api-access-f6kh2" (OuterVolumeSpecName: "kube-api-access-f6kh2") pod "24806960-bab2-45aa-990a-014aafb206a7" (UID: "24806960-bab2-45aa-990a-014aafb206a7"). InnerVolumeSpecName "kube-api-access-f6kh2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:44:20 crc kubenswrapper[4941]: I1130 07:44:20.747336 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24806960-bab2-45aa-990a-014aafb206a7-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 07:44:20 crc kubenswrapper[4941]: I1130 07:44:20.747376 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6kh2\" (UniqueName: \"kubernetes.io/projected/24806960-bab2-45aa-990a-014aafb206a7-kube-api-access-f6kh2\") on node \"crc\" DevicePath \"\""
Nov 30 07:44:20 crc kubenswrapper[4941]: I1130 07:44:20.794589 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24806960-bab2-45aa-990a-014aafb206a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "24806960-bab2-45aa-990a-014aafb206a7" (UID: "24806960-bab2-45aa-990a-014aafb206a7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:44:20 crc kubenswrapper[4941]: I1130 07:44:20.848483 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24806960-bab2-45aa-990a-014aafb206a7-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 07:44:21 crc kubenswrapper[4941]: I1130 07:44:21.266593 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pdwwt" event={"ID":"24806960-bab2-45aa-990a-014aafb206a7","Type":"ContainerDied","Data":"4498d2032004ae71e5430ca24fc227c0ef0d57ed4d3f2c9171e25f5c47b4ae55"}
Nov 30 07:44:21 crc kubenswrapper[4941]: I1130 07:44:21.266998 4941 scope.go:117] "RemoveContainer" containerID="a5f714014235c49b0ffc1b840be0bf8195c7bed919636fde9f3afbbce80b2c39"
Nov 30 07:44:21 crc kubenswrapper[4941]: I1130 07:44:21.267006 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pdwwt"
Nov 30 07:44:21 crc kubenswrapper[4941]: I1130 07:44:21.290604 4941 scope.go:117] "RemoveContainer" containerID="ecec607c69bb19e06f8fe8b12bee33f845b20bb9d1e354d5efd46406398752b3"
Nov 30 07:44:21 crc kubenswrapper[4941]: I1130 07:44:21.318892 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pdwwt"]
Nov 30 07:44:21 crc kubenswrapper[4941]: I1130 07:44:21.325669 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pdwwt"]
Nov 30 07:44:21 crc kubenswrapper[4941]: I1130 07:44:21.340408 4941 scope.go:117] "RemoveContainer" containerID="acc5050fc3498689b3be9e58b8fef2055e5d37913f9ccfb9148cca8c8ae4b0e0"
Nov 30 07:44:21 crc kubenswrapper[4941]: I1130 07:44:21.540888 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24806960-bab2-45aa-990a-014aafb206a7" path="/var/lib/kubelet/pods/24806960-bab2-45aa-990a-014aafb206a7/volumes"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.172175 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"]
Nov 30 07:45:00 crc kubenswrapper[4941]: E1130 07:45:00.174971 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33668f65-cc45-4661-ab67-3423342e0aa8" containerName="registry-server"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.175102 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="33668f65-cc45-4661-ab67-3423342e0aa8" containerName="registry-server"
Nov 30 07:45:00 crc kubenswrapper[4941]: E1130 07:45:00.176721 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24806960-bab2-45aa-990a-014aafb206a7" containerName="extract-content"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.176821 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="24806960-bab2-45aa-990a-014aafb206a7" containerName="extract-content"
Nov 30 07:45:00 crc kubenswrapper[4941]: E1130 07:45:00.176885 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33668f65-cc45-4661-ab67-3423342e0aa8" containerName="extract-utilities"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.176957 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="33668f65-cc45-4661-ab67-3423342e0aa8" containerName="extract-utilities"
Nov 30 07:45:00 crc kubenswrapper[4941]: E1130 07:45:00.177018 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24806960-bab2-45aa-990a-014aafb206a7" containerName="extract-utilities"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.177073 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="24806960-bab2-45aa-990a-014aafb206a7" containerName="extract-utilities"
Nov 30 07:45:00 crc kubenswrapper[4941]: E1130 07:45:00.177133 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71872f12-399d-421c-824e-4e2c1d48856d" containerName="registry-server"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.177189 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="71872f12-399d-421c-824e-4e2c1d48856d" containerName="registry-server"
Nov 30 07:45:00 crc kubenswrapper[4941]: E1130 07:45:00.177261 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33668f65-cc45-4661-ab67-3423342e0aa8" containerName="extract-content"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.177325 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="33668f65-cc45-4661-ab67-3423342e0aa8" containerName="extract-content"
Nov 30 07:45:00 crc kubenswrapper[4941]: E1130 07:45:00.177407 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71872f12-399d-421c-824e-4e2c1d48856d" containerName="extract-content"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.177462 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="71872f12-399d-421c-824e-4e2c1d48856d" containerName="extract-content"
Nov 30 07:45:00 crc kubenswrapper[4941]: E1130 07:45:00.177539 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24806960-bab2-45aa-990a-014aafb206a7" containerName="registry-server"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.177602 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="24806960-bab2-45aa-990a-014aafb206a7" containerName="registry-server"
Nov 30 07:45:00 crc kubenswrapper[4941]: E1130 07:45:00.178630 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71872f12-399d-421c-824e-4e2c1d48856d" containerName="extract-utilities"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.178722 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="71872f12-399d-421c-824e-4e2c1d48856d" containerName="extract-utilities"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.179293 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="71872f12-399d-421c-824e-4e2c1d48856d" containerName="registry-server"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.179386 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="24806960-bab2-45aa-990a-014aafb206a7" containerName="registry-server"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.179420 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="33668f65-cc45-4661-ab67-3423342e0aa8" containerName="registry-server"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.180648 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.185203 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.189659 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"]
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.192055 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.242878 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-config-volume\") pod \"collect-profiles-29408145-7q47p\" (UID: \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.242921 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptj8p\" (UniqueName: \"kubernetes.io/projected/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-kube-api-access-ptj8p\") pod \"collect-profiles-29408145-7q47p\" (UID: \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.242940 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-secret-volume\") pod \"collect-profiles-29408145-7q47p\" (UID: \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.344940 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-config-volume\") pod \"collect-profiles-29408145-7q47p\" (UID: \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.344985 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptj8p\" (UniqueName: \"kubernetes.io/projected/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-kube-api-access-ptj8p\") pod \"collect-profiles-29408145-7q47p\" (UID: \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.345009 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-secret-volume\") pod \"collect-profiles-29408145-7q47p\" (UID: \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.346302 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-config-volume\") pod \"collect-profiles-29408145-7q47p\" (UID: \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.354509 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-secret-volume\") pod \"collect-profiles-29408145-7q47p\" (UID: \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.365830 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptj8p\" (UniqueName: \"kubernetes.io/projected/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-kube-api-access-ptj8p\") pod \"collect-profiles-29408145-7q47p\" (UID: \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.523115 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:00 crc kubenswrapper[4941]: I1130 07:45:00.794967 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"]
Nov 30 07:45:01 crc kubenswrapper[4941]: I1130 07:45:01.673249 4941 generic.go:334] "Generic (PLEG): container finished" podID="29fb1557-dd9b-447b-aaa4-6bd9f2daf116" containerID="d24c30dd7f06fee25e56f62176ace5efc6467dd5c45ff75bef404aeadc35cb80" exitCode=0
Nov 30 07:45:01 crc kubenswrapper[4941]: I1130 07:45:01.673301 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p" event={"ID":"29fb1557-dd9b-447b-aaa4-6bd9f2daf116","Type":"ContainerDied","Data":"d24c30dd7f06fee25e56f62176ace5efc6467dd5c45ff75bef404aeadc35cb80"}
Nov 30 07:45:01 crc kubenswrapper[4941]: I1130 07:45:01.673715 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p" event={"ID":"29fb1557-dd9b-447b-aaa4-6bd9f2daf116","Type":"ContainerStarted","Data":"6ce387e6b2961bbd8ce2812a45e12a0954c0f34b99d4b1582afc6dcec5728349"}
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.056101 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.096233 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptj8p\" (UniqueName: \"kubernetes.io/projected/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-kube-api-access-ptj8p\") pod \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\" (UID: \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\") "
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.096405 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-secret-volume\") pod \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\" (UID: \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\") "
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.096515 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-config-volume\") pod \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\" (UID: \"29fb1557-dd9b-447b-aaa4-6bd9f2daf116\") "
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.098071 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-config-volume" (OuterVolumeSpecName: "config-volume") pod "29fb1557-dd9b-447b-aaa4-6bd9f2daf116" (UID: "29fb1557-dd9b-447b-aaa4-6bd9f2daf116"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.105078 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-kube-api-access-ptj8p" (OuterVolumeSpecName: "kube-api-access-ptj8p") pod "29fb1557-dd9b-447b-aaa4-6bd9f2daf116" (UID: "29fb1557-dd9b-447b-aaa4-6bd9f2daf116"). InnerVolumeSpecName "kube-api-access-ptj8p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.108972 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "29fb1557-dd9b-447b-aaa4-6bd9f2daf116" (UID: "29fb1557-dd9b-447b-aaa4-6bd9f2daf116"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.199574 4941 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-secret-volume\") on node \"crc\" DevicePath \"\""
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.199624 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-config-volume\") on node \"crc\" DevicePath \"\""
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.199643 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptj8p\" (UniqueName: \"kubernetes.io/projected/29fb1557-dd9b-447b-aaa4-6bd9f2daf116-kube-api-access-ptj8p\") on node \"crc\" DevicePath \"\""
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.698874 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p" event={"ID":"29fb1557-dd9b-447b-aaa4-6bd9f2daf116","Type":"ContainerDied","Data":"6ce387e6b2961bbd8ce2812a45e12a0954c0f34b99d4b1582afc6dcec5728349"}
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.698927 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ce387e6b2961bbd8ce2812a45e12a0954c0f34b99d4b1582afc6dcec5728349"
Nov 30 07:45:03 crc kubenswrapper[4941]: I1130 07:45:03.699602 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"
Nov 30 07:45:04 crc kubenswrapper[4941]: I1130 07:45:04.140520 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs"]
Nov 30 07:45:04 crc kubenswrapper[4941]: I1130 07:45:04.146532 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408100-kwtxs"]
Nov 30 07:45:05 crc kubenswrapper[4941]: I1130 07:45:05.542286 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f6d08f8-1b6d-4579-bde6-104b9c1aac9d" path="/var/lib/kubelet/pods/1f6d08f8-1b6d-4579-bde6-104b9c1aac9d/volumes"
Nov 30 07:45:39 crc kubenswrapper[4941]: I1130 07:45:39.493244 4941 scope.go:117] "RemoveContainer" containerID="220fa5542fbb35a72334456ec34032f0c610688a8e0f506d8074c203299214b6"
Nov 30 07:46:02 crc kubenswrapper[4941]: I1130 07:46:02.979465 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 07:46:02 crc kubenswrapper[4941]: I1130 07:46:02.980320 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 07:46:32 crc kubenswrapper[4941]: I1130 07:46:32.979007 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 07:46:32 crc kubenswrapper[4941]: I1130 07:46:32.979747 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 07:47:02 crc kubenswrapper[4941]: I1130 07:47:02.978834 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 07:47:02 crc kubenswrapper[4941]: I1130 07:47:02.980552 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 07:47:02 crc kubenswrapper[4941]: I1130 07:47:02.980689 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 07:47:02 crc kubenswrapper[4941]: I1130 07:47:02.981503 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2f609ca886e9c809239760e3b3d6fb6ce6ff323e36c67161a331ea3a66600915"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 07:47:02 crc kubenswrapper[4941]: I1130 07:47:02.981696 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://2f609ca886e9c809239760e3b3d6fb6ce6ff323e36c67161a331ea3a66600915" gracePeriod=600
Nov 30 07:47:03 crc kubenswrapper[4941]: I1130 07:47:03.885674 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="2f609ca886e9c809239760e3b3d6fb6ce6ff323e36c67161a331ea3a66600915" exitCode=0
Nov 30 07:47:03 crc kubenswrapper[4941]: I1130 07:47:03.885778 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"2f609ca886e9c809239760e3b3d6fb6ce6ff323e36c67161a331ea3a66600915"}
Nov 30 07:47:03 crc kubenswrapper[4941]: I1130 07:47:03.886785 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7"}
Nov 30 07:47:03 crc kubenswrapper[4941]: I1130 07:47:03.886833 4941 scope.go:117] "RemoveContainer" containerID="ac415f00bc664d1be4fb4c427aabb47b6f8101127dabbc61108764da0c25eabf"
Nov 30 07:49:32 crc kubenswrapper[4941]: I1130 07:49:32.978642 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:49:32 crc kubenswrapper[4941]: I1130 07:49:32.979627 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.505880 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rpvxf"] Nov 30 07:49:52 crc kubenswrapper[4941]: E1130 07:49:52.507610 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29fb1557-dd9b-447b-aaa4-6bd9f2daf116" containerName="collect-profiles" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.507640 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="29fb1557-dd9b-447b-aaa4-6bd9f2daf116" containerName="collect-profiles" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.508024 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="29fb1557-dd9b-447b-aaa4-6bd9f2daf116" containerName="collect-profiles" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.510452 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.533511 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rpvxf"] Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.707356 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4t68\" (UniqueName: \"kubernetes.io/projected/46857515-2fbf-43bc-8808-90b4abfb3dd1-kube-api-access-x4t68\") pod \"certified-operators-rpvxf\" (UID: \"46857515-2fbf-43bc-8808-90b4abfb3dd1\") " pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.709170 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46857515-2fbf-43bc-8808-90b4abfb3dd1-utilities\") pod \"certified-operators-rpvxf\" (UID: \"46857515-2fbf-43bc-8808-90b4abfb3dd1\") " pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.709392 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46857515-2fbf-43bc-8808-90b4abfb3dd1-catalog-content\") pod \"certified-operators-rpvxf\" (UID: \"46857515-2fbf-43bc-8808-90b4abfb3dd1\") " pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.811259 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4t68\" (UniqueName: \"kubernetes.io/projected/46857515-2fbf-43bc-8808-90b4abfb3dd1-kube-api-access-x4t68\") pod \"certified-operators-rpvxf\" (UID: \"46857515-2fbf-43bc-8808-90b4abfb3dd1\") " pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.811851 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46857515-2fbf-43bc-8808-90b4abfb3dd1-utilities\") pod 
\"certified-operators-rpvxf\" (UID: \"46857515-2fbf-43bc-8808-90b4abfb3dd1\") " pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.812152 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46857515-2fbf-43bc-8808-90b4abfb3dd1-catalog-content\") pod \"certified-operators-rpvxf\" (UID: \"46857515-2fbf-43bc-8808-90b4abfb3dd1\") " pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.812442 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46857515-2fbf-43bc-8808-90b4abfb3dd1-utilities\") pod \"certified-operators-rpvxf\" (UID: \"46857515-2fbf-43bc-8808-90b4abfb3dd1\") " pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.813079 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46857515-2fbf-43bc-8808-90b4abfb3dd1-catalog-content\") pod \"certified-operators-rpvxf\" (UID: \"46857515-2fbf-43bc-8808-90b4abfb3dd1\") " pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.833816 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4t68\" (UniqueName: \"kubernetes.io/projected/46857515-2fbf-43bc-8808-90b4abfb3dd1-kube-api-access-x4t68\") pod \"certified-operators-rpvxf\" (UID: \"46857515-2fbf-43bc-8808-90b4abfb3dd1\") " pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:49:52 crc kubenswrapper[4941]: I1130 07:49:52.870661 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:49:53 crc kubenswrapper[4941]: I1130 07:49:53.195654 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rpvxf"] Nov 30 07:49:53 crc kubenswrapper[4941]: I1130 07:49:53.662907 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rpvxf" event={"ID":"46857515-2fbf-43bc-8808-90b4abfb3dd1","Type":"ContainerStarted","Data":"2e372e0e3a3da75ffe3dc2ffc876703986c2e3a9ff8045d6a0aad2201f02ff43"} Nov 30 07:49:54 crc kubenswrapper[4941]: I1130 07:49:54.671029 4941 generic.go:334] "Generic (PLEG): container finished" podID="46857515-2fbf-43bc-8808-90b4abfb3dd1" containerID="12cb8897a0acc3d68958db6a5a103f7db3f6a337e2fac617521e76d741d3e57e" exitCode=0 Nov 30 07:49:54 crc kubenswrapper[4941]: I1130 07:49:54.671273 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rpvxf" event={"ID":"46857515-2fbf-43bc-8808-90b4abfb3dd1","Type":"ContainerDied","Data":"12cb8897a0acc3d68958db6a5a103f7db3f6a337e2fac617521e76d741d3e57e"} Nov 30 07:49:54 crc kubenswrapper[4941]: I1130 07:49:54.675854 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 07:49:55 crc kubenswrapper[4941]: I1130 07:49:55.682811 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rpvxf" event={"ID":"46857515-2fbf-43bc-8808-90b4abfb3dd1","Type":"ContainerStarted","Data":"e6c4e59e4f174068ed6bd6613d790b3916f4310620296ea74e37c24907b67162"} Nov 30 07:49:56 crc kubenswrapper[4941]: I1130 07:49:56.696151 4941 generic.go:334] "Generic (PLEG): container finished" podID="46857515-2fbf-43bc-8808-90b4abfb3dd1" containerID="e6c4e59e4f174068ed6bd6613d790b3916f4310620296ea74e37c24907b67162" exitCode=0 Nov 30 07:49:56 crc kubenswrapper[4941]: I1130 07:49:56.696206 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rpvxf" event={"ID":"46857515-2fbf-43bc-8808-90b4abfb3dd1","Type":"ContainerDied","Data":"e6c4e59e4f174068ed6bd6613d790b3916f4310620296ea74e37c24907b67162"} Nov 30 07:49:57 crc kubenswrapper[4941]: I1130 07:49:57.712208 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rpvxf" event={"ID":"46857515-2fbf-43bc-8808-90b4abfb3dd1","Type":"ContainerStarted","Data":"1abedd891f6dbce9c98a4c11d707c0c122c0a10d64a5fd9fe1eeb4de687b01f9"} Nov 30 07:50:02 crc kubenswrapper[4941]: I1130 07:50:02.871466 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:50:02 crc kubenswrapper[4941]: I1130 07:50:02.872369 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:50:02 crc kubenswrapper[4941]: I1130 07:50:02.978713 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:50:02 crc kubenswrapper[4941]: I1130 07:50:02.978817 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:50:02 crc kubenswrapper[4941]: I1130 07:50:02.981239 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:50:03 crc kubenswrapper[4941]: I1130 07:50:03.008815 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rpvxf" podStartSLOduration=8.609237222 podStartE2EDuration="11.008788532s" podCreationTimestamp="2025-11-30 07:49:52 +0000 UTC" firstStartedPulling="2025-11-30 07:49:54.675602185 +0000 UTC m=+3815.443773794" lastFinishedPulling="2025-11-30 07:49:57.075153505 +0000 UTC m=+3817.843325104" observedRunningTime="2025-11-30 07:49:57.771809412 +0000 UTC m=+3818.539981051" watchObservedRunningTime="2025-11-30 07:50:03.008788532 +0000 UTC m=+3823.776960151" Nov 30 07:50:03 crc kubenswrapper[4941]: I1130 07:50:03.830857 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:50:03 crc kubenswrapper[4941]: I1130 07:50:03.900023 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rpvxf"] Nov 30 07:50:05 crc kubenswrapper[4941]: I1130 07:50:05.779265 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rpvxf" podUID="46857515-2fbf-43bc-8808-90b4abfb3dd1" containerName="registry-server" containerID="cri-o://1abedd891f6dbce9c98a4c11d707c0c122c0a10d64a5fd9fe1eeb4de687b01f9" gracePeriod=2 Nov 30 07:50:06 crc kubenswrapper[4941]: I1130 07:50:06.808200 4941 generic.go:334] "Generic (PLEG): container finished" podID="46857515-2fbf-43bc-8808-90b4abfb3dd1" containerID="1abedd891f6dbce9c98a4c11d707c0c122c0a10d64a5fd9fe1eeb4de687b01f9" exitCode=0 Nov 30 07:50:06 crc kubenswrapper[4941]: I1130 07:50:06.808275 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rpvxf" event={"ID":"46857515-2fbf-43bc-8808-90b4abfb3dd1","Type":"ContainerDied","Data":"1abedd891f6dbce9c98a4c11d707c0c122c0a10d64a5fd9fe1eeb4de687b01f9"} Nov 30 07:50:06 crc kubenswrapper[4941]: I1130 07:50:06.938788 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.043228 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46857515-2fbf-43bc-8808-90b4abfb3dd1-utilities\") pod \"46857515-2fbf-43bc-8808-90b4abfb3dd1\" (UID: \"46857515-2fbf-43bc-8808-90b4abfb3dd1\") " Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.043340 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46857515-2fbf-43bc-8808-90b4abfb3dd1-catalog-content\") pod \"46857515-2fbf-43bc-8808-90b4abfb3dd1\" (UID: \"46857515-2fbf-43bc-8808-90b4abfb3dd1\") " Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.043453 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4t68\" (UniqueName: \"kubernetes.io/projected/46857515-2fbf-43bc-8808-90b4abfb3dd1-kube-api-access-x4t68\") pod \"46857515-2fbf-43bc-8808-90b4abfb3dd1\" (UID: \"46857515-2fbf-43bc-8808-90b4abfb3dd1\") " Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.044482 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46857515-2fbf-43bc-8808-90b4abfb3dd1-utilities" (OuterVolumeSpecName: "utilities") pod "46857515-2fbf-43bc-8808-90b4abfb3dd1" (UID: "46857515-2fbf-43bc-8808-90b4abfb3dd1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.051663 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46857515-2fbf-43bc-8808-90b4abfb3dd1-kube-api-access-x4t68" (OuterVolumeSpecName: "kube-api-access-x4t68") pod "46857515-2fbf-43bc-8808-90b4abfb3dd1" (UID: "46857515-2fbf-43bc-8808-90b4abfb3dd1"). InnerVolumeSpecName "kube-api-access-x4t68". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.103104 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46857515-2fbf-43bc-8808-90b4abfb3dd1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "46857515-2fbf-43bc-8808-90b4abfb3dd1" (UID: "46857515-2fbf-43bc-8808-90b4abfb3dd1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.145435 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4t68\" (UniqueName: \"kubernetes.io/projected/46857515-2fbf-43bc-8808-90b4abfb3dd1-kube-api-access-x4t68\") on node \"crc\" DevicePath \"\"" Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.145466 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46857515-2fbf-43bc-8808-90b4abfb3dd1-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.145477 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46857515-2fbf-43bc-8808-90b4abfb3dd1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.818341 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rpvxf" event={"ID":"46857515-2fbf-43bc-8808-90b4abfb3dd1","Type":"ContainerDied","Data":"2e372e0e3a3da75ffe3dc2ffc876703986c2e3a9ff8045d6a0aad2201f02ff43"} Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.818387 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rpvxf" Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.818692 4941 scope.go:117] "RemoveContainer" containerID="1abedd891f6dbce9c98a4c11d707c0c122c0a10d64a5fd9fe1eeb4de687b01f9" Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.844254 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rpvxf"] Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.852956 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rpvxf"] Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.869910 4941 scope.go:117] "RemoveContainer" containerID="e6c4e59e4f174068ed6bd6613d790b3916f4310620296ea74e37c24907b67162" Nov 30 07:50:07 crc kubenswrapper[4941]: I1130 07:50:07.895873 4941 scope.go:117] "RemoveContainer" containerID="12cb8897a0acc3d68958db6a5a103f7db3f6a337e2fac617521e76d741d3e57e" Nov 30 07:50:09 crc kubenswrapper[4941]: I1130 07:50:09.535297 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46857515-2fbf-43bc-8808-90b4abfb3dd1" path="/var/lib/kubelet/pods/46857515-2fbf-43bc-8808-90b4abfb3dd1/volumes" Nov 30 07:50:32 crc kubenswrapper[4941]: I1130 07:50:32.978398 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 07:50:32 crc kubenswrapper[4941]: I1130 07:50:32.979057 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 07:50:32 crc kubenswrapper[4941]: I1130 07:50:32.979124 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 07:50:32 crc kubenswrapper[4941]: I1130 07:50:32.980045 4941 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 07:50:32 crc kubenswrapper[4941]: I1130 07:50:32.980156 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" gracePeriod=600 Nov 30 07:50:33 crc kubenswrapper[4941]: E1130 07:50:33.112070 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:50:34 crc kubenswrapper[4941]: I1130 07:50:34.033616 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" exitCode=0 Nov 30 07:50:34 crc kubenswrapper[4941]: I1130 07:50:34.033698 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7"} Nov 30 07:50:34 crc kubenswrapper[4941]: I1130 07:50:34.033915 4941 scope.go:117] "RemoveContainer" containerID="2f609ca886e9c809239760e3b3d6fb6ce6ff323e36c67161a331ea3a66600915" Nov 30 07:50:34 crc kubenswrapper[4941]: I1130 07:50:34.034407 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:50:34 crc kubenswrapper[4941]: E1130 07:50:34.034617 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:50:44 crc kubenswrapper[4941]: I1130 07:50:44.521476 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:50:44 crc kubenswrapper[4941]: E1130 07:50:44.522314 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:50:57 crc kubenswrapper[4941]: I1130 07:50:57.522078 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 
07:50:57 crc kubenswrapper[4941]: E1130 07:50:57.523026 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:51:11 crc kubenswrapper[4941]: I1130 07:51:11.522742 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:51:11 crc kubenswrapper[4941]: E1130 07:51:11.524014 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:51:25 crc kubenswrapper[4941]: I1130 07:51:25.521878 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:51:25 crc kubenswrapper[4941]: E1130 07:51:25.522736 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:51:38 crc kubenswrapper[4941]: I1130 07:51:38.522054 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:51:38 crc kubenswrapper[4941]: E1130 07:51:38.522827 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:51:53 crc kubenswrapper[4941]: I1130 07:51:53.523642 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:51:53 crc kubenswrapper[4941]: E1130 07:51:53.524857 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:52:06 crc kubenswrapper[4941]: I1130 07:52:06.522178 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:52:06 crc kubenswrapper[4941]: E1130 07:52:06.523309 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:52:19 crc kubenswrapper[4941]: I1130 07:52:19.530519 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:52:19 crc kubenswrapper[4941]: E1130 07:52:19.531719 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:52:32 crc kubenswrapper[4941]: I1130 07:52:32.522536 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:52:32 crc kubenswrapper[4941]: E1130 07:52:32.525313 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:52:45 crc kubenswrapper[4941]: I1130 07:52:45.522576 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:52:45 crc kubenswrapper[4941]: E1130 07:52:45.523976 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:52:58 crc kubenswrapper[4941]: I1130 07:52:58.522036 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:52:58 crc kubenswrapper[4941]: E1130 07:52:58.523233 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:53:13 crc kubenswrapper[4941]: I1130 07:53:13.523002 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:53:13 crc kubenswrapper[4941]: E1130 07:53:13.524432 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:53:27 crc kubenswrapper[4941]: I1130 07:53:27.522528 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:53:27 crc kubenswrapper[4941]: E1130 07:53:27.524831 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:53:31 crc kubenswrapper[4941]: I1130 07:53:31.896354 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-kd2wq"] Nov 30 07:53:31 crc kubenswrapper[4941]: I1130 07:53:31.906365 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-kd2wq"] Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.035905 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-xgt49"] Nov 30 07:53:32 crc kubenswrapper[4941]: E1130 07:53:32.036513 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46857515-2fbf-43bc-8808-90b4abfb3dd1" containerName="extract-utilities" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.036545 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="46857515-2fbf-43bc-8808-90b4abfb3dd1" containerName="extract-utilities" Nov 30 07:53:32 crc kubenswrapper[4941]: E1130 07:53:32.036570 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46857515-2fbf-43bc-8808-90b4abfb3dd1" containerName="registry-server" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.036580 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="46857515-2fbf-43bc-8808-90b4abfb3dd1" containerName="registry-server" Nov 30 07:53:32 crc kubenswrapper[4941]: E1130 07:53:32.036591 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46857515-2fbf-43bc-8808-90b4abfb3dd1" containerName="extract-content" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.036604 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="46857515-2fbf-43bc-8808-90b4abfb3dd1" containerName="extract-content" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.036819 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="46857515-2fbf-43bc-8808-90b4abfb3dd1" containerName="registry-server" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.048592 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.049586 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-xgt49"] Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.055103 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.055997 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.056532 4941 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-tkjrj" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.059911 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.224995 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/91e15d36-2048-4136-8428-045598d4b13c-node-mnt\") pod \"crc-storage-crc-xgt49\" (UID: \"91e15d36-2048-4136-8428-045598d4b13c\") " pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.225055 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/91e15d36-2048-4136-8428-045598d4b13c-crc-storage\") pod \"crc-storage-crc-xgt49\" (UID: \"91e15d36-2048-4136-8428-045598d4b13c\") " pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.225087 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpg8s\" (UniqueName: \"kubernetes.io/projected/91e15d36-2048-4136-8428-045598d4b13c-kube-api-access-fpg8s\") pod \"crc-storage-crc-xgt49\" (UID: \"91e15d36-2048-4136-8428-045598d4b13c\") " pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.325897 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/91e15d36-2048-4136-8428-045598d4b13c-node-mnt\") pod \"crc-storage-crc-xgt49\" (UID: \"91e15d36-2048-4136-8428-045598d4b13c\") " pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.325962 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/91e15d36-2048-4136-8428-045598d4b13c-crc-storage\") pod \"crc-storage-crc-xgt49\" (UID: \"91e15d36-2048-4136-8428-045598d4b13c\") " pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.325987 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpg8s\" (UniqueName: \"kubernetes.io/projected/91e15d36-2048-4136-8428-045598d4b13c-kube-api-access-fpg8s\") pod \"crc-storage-crc-xgt49\" (UID: \"91e15d36-2048-4136-8428-045598d4b13c\") " pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.326681 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/91e15d36-2048-4136-8428-045598d4b13c-node-mnt\") pod \"crc-storage-crc-xgt49\" (UID: \"91e15d36-2048-4136-8428-045598d4b13c\") " 
pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.327001 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/91e15d36-2048-4136-8428-045598d4b13c-crc-storage\") pod \"crc-storage-crc-xgt49\" (UID: \"91e15d36-2048-4136-8428-045598d4b13c\") " pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.360838 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpg8s\" (UniqueName: \"kubernetes.io/projected/91e15d36-2048-4136-8428-045598d4b13c-kube-api-access-fpg8s\") pod \"crc-storage-crc-xgt49\" (UID: \"91e15d36-2048-4136-8428-045598d4b13c\") " pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.378592 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:32 crc kubenswrapper[4941]: I1130 07:53:32.913444 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-xgt49"] Nov 30 07:53:33 crc kubenswrapper[4941]: I1130 07:53:33.541188 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48497e9c-eebe-41d6-8ed3-a9717a43ddae" path="/var/lib/kubelet/pods/48497e9c-eebe-41d6-8ed3-a9717a43ddae/volumes" Nov 30 07:53:33 crc kubenswrapper[4941]: I1130 07:53:33.886220 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xgt49" event={"ID":"91e15d36-2048-4136-8428-045598d4b13c","Type":"ContainerStarted","Data":"b679814310c0d8bb482470a68c37d2b4050f27f4970c86a1d4d51ae7b76153be"} Nov 30 07:53:33 crc kubenswrapper[4941]: I1130 07:53:33.886630 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xgt49" event={"ID":"91e15d36-2048-4136-8428-045598d4b13c","Type":"ContainerStarted","Data":"97f6d9e809f7c25745643ba1c6538e529b5f9baa76c94c8dd3cb85f2b405b39a"} Nov 30 07:53:33 crc kubenswrapper[4941]: I1130 07:53:33.909552 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="crc-storage/crc-storage-crc-xgt49" podStartSLOduration=1.345975047 podStartE2EDuration="1.909513231s" podCreationTimestamp="2025-11-30 07:53:32 +0000 UTC" firstStartedPulling="2025-11-30 07:53:32.932040466 +0000 UTC m=+4033.700212085" lastFinishedPulling="2025-11-30 07:53:33.49557866 +0000 UTC m=+4034.263750269" observedRunningTime="2025-11-30 07:53:33.900074779 +0000 UTC m=+4034.668246398" watchObservedRunningTime="2025-11-30 07:53:33.909513231 +0000 UTC m=+4034.677684850" Nov 30 07:53:34 crc kubenswrapper[4941]: I1130 07:53:34.900380 4941 generic.go:334] "Generic (PLEG): container finished" podID="91e15d36-2048-4136-8428-045598d4b13c" containerID="b679814310c0d8bb482470a68c37d2b4050f27f4970c86a1d4d51ae7b76153be" exitCode=0 Nov 30 07:53:34 crc kubenswrapper[4941]: I1130 07:53:34.900453 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xgt49" event={"ID":"91e15d36-2048-4136-8428-045598d4b13c","Type":"ContainerDied","Data":"b679814310c0d8bb482470a68c37d2b4050f27f4970c86a1d4d51ae7b76153be"} Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.360745 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.435728 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/91e15d36-2048-4136-8428-045598d4b13c-crc-storage\") pod \"91e15d36-2048-4136-8428-045598d4b13c\" (UID: \"91e15d36-2048-4136-8428-045598d4b13c\") " Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.435792 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/91e15d36-2048-4136-8428-045598d4b13c-node-mnt\") pod \"91e15d36-2048-4136-8428-045598d4b13c\" (UID: \"91e15d36-2048-4136-8428-045598d4b13c\") " Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.435822 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpg8s\" (UniqueName: \"kubernetes.io/projected/91e15d36-2048-4136-8428-045598d4b13c-kube-api-access-fpg8s\") pod \"91e15d36-2048-4136-8428-045598d4b13c\" (UID: \"91e15d36-2048-4136-8428-045598d4b13c\") " Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.435927 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/91e15d36-2048-4136-8428-045598d4b13c-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "91e15d36-2048-4136-8428-045598d4b13c" (UID: "91e15d36-2048-4136-8428-045598d4b13c"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.436234 4941 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/91e15d36-2048-4136-8428-045598d4b13c-node-mnt\") on node \"crc\" DevicePath \"\"" Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.442528 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91e15d36-2048-4136-8428-045598d4b13c-kube-api-access-fpg8s" (OuterVolumeSpecName: "kube-api-access-fpg8s") pod "91e15d36-2048-4136-8428-045598d4b13c" (UID: "91e15d36-2048-4136-8428-045598d4b13c"). InnerVolumeSpecName "kube-api-access-fpg8s". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.472992 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91e15d36-2048-4136-8428-045598d4b13c-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "91e15d36-2048-4136-8428-045598d4b13c" (UID: "91e15d36-2048-4136-8428-045598d4b13c"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.538194 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpg8s\" (UniqueName: \"kubernetes.io/projected/91e15d36-2048-4136-8428-045598d4b13c-kube-api-access-fpg8s\") on node \"crc\" DevicePath \"\"" Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.538691 4941 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/91e15d36-2048-4136-8428-045598d4b13c-crc-storage\") on node \"crc\" DevicePath \"\"" Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.923408 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-xgt49" event={"ID":"91e15d36-2048-4136-8428-045598d4b13c","Type":"ContainerDied","Data":"97f6d9e809f7c25745643ba1c6538e529b5f9baa76c94c8dd3cb85f2b405b39a"} Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.923465 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97f6d9e809f7c25745643ba1c6538e529b5f9baa76c94c8dd3cb85f2b405b39a" Nov 30 07:53:36 crc kubenswrapper[4941]: I1130 07:53:36.923465 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-xgt49" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.285894 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-xgt49"] Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.299447 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-xgt49"] Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.410257 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-ptxsw"] Nov 30 07:53:38 crc kubenswrapper[4941]: E1130 07:53:38.410662 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e15d36-2048-4136-8428-045598d4b13c" containerName="storage" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.410681 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e15d36-2048-4136-8428-045598d4b13c" containerName="storage" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.410903 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="91e15d36-2048-4136-8428-045598d4b13c" containerName="storage" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.411475 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.413708 4941 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-tkjrj" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.415587 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.415967 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.416663 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.432160 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-ptxsw"] Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.479445 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/46514814-650b-4494-ae36-8a9806783944-crc-storage\") pod \"crc-storage-crc-ptxsw\" (UID: \"46514814-650b-4494-ae36-8a9806783944\") " pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.479706 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/46514814-650b-4494-ae36-8a9806783944-node-mnt\") pod \"crc-storage-crc-ptxsw\" (UID: \"46514814-650b-4494-ae36-8a9806783944\") " pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.479784 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chh94\" (UniqueName: \"kubernetes.io/projected/46514814-650b-4494-ae36-8a9806783944-kube-api-access-chh94\") pod \"crc-storage-crc-ptxsw\" (UID: \"46514814-650b-4494-ae36-8a9806783944\") " pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.581178 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chh94\" (UniqueName: \"kubernetes.io/projected/46514814-650b-4494-ae36-8a9806783944-kube-api-access-chh94\") pod \"crc-storage-crc-ptxsw\" (UID: \"46514814-650b-4494-ae36-8a9806783944\") " pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.581394 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/46514814-650b-4494-ae36-8a9806783944-crc-storage\") pod \"crc-storage-crc-ptxsw\" (UID: \"46514814-650b-4494-ae36-8a9806783944\") " pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.581580 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/46514814-650b-4494-ae36-8a9806783944-node-mnt\") pod \"crc-storage-crc-ptxsw\" (UID: \"46514814-650b-4494-ae36-8a9806783944\") " pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.582990 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/46514814-650b-4494-ae36-8a9806783944-crc-storage\") pod \"crc-storage-crc-ptxsw\" (UID: \"46514814-650b-4494-ae36-8a9806783944\") " 
pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.583017 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/46514814-650b-4494-ae36-8a9806783944-node-mnt\") pod \"crc-storage-crc-ptxsw\" (UID: \"46514814-650b-4494-ae36-8a9806783944\") " pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.613695 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chh94\" (UniqueName: \"kubernetes.io/projected/46514814-650b-4494-ae36-8a9806783944-kube-api-access-chh94\") pod \"crc-storage-crc-ptxsw\" (UID: \"46514814-650b-4494-ae36-8a9806783944\") " pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:38 crc kubenswrapper[4941]: I1130 07:53:38.741567 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:39 crc kubenswrapper[4941]: I1130 07:53:39.257678 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-ptxsw"] Nov 30 07:53:39 crc kubenswrapper[4941]: I1130 07:53:39.533078 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:53:39 crc kubenswrapper[4941]: E1130 07:53:39.533838 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:53:39 crc kubenswrapper[4941]: I1130 07:53:39.542492 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91e15d36-2048-4136-8428-045598d4b13c" path="/var/lib/kubelet/pods/91e15d36-2048-4136-8428-045598d4b13c/volumes" Nov 30 07:53:39 crc kubenswrapper[4941]: I1130 07:53:39.735302 4941 scope.go:117] "RemoveContainer" containerID="37c1261a13fee2347e5c38fc1d203f431cf5b5e6fcdab49d922d941f419e5f05" Nov 30 07:53:39 crc kubenswrapper[4941]: I1130 07:53:39.950917 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-ptxsw" event={"ID":"46514814-650b-4494-ae36-8a9806783944","Type":"ContainerStarted","Data":"65923c3a17098e241bca140e817121534bca1fcf17b954826c44fabf8025e076"} Nov 30 07:53:40 crc kubenswrapper[4941]: I1130 07:53:40.960661 4941 generic.go:334] "Generic (PLEG): container finished" podID="46514814-650b-4494-ae36-8a9806783944" containerID="67920c88ebf97c559f37fd492d8daf7c9f01a5b5eeec132b73cae7d3f75c2bef" exitCode=0 Nov 30 07:53:40 crc kubenswrapper[4941]: I1130 07:53:40.960704 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-ptxsw" event={"ID":"46514814-650b-4494-ae36-8a9806783944","Type":"ContainerDied","Data":"67920c88ebf97c559f37fd492d8daf7c9f01a5b5eeec132b73cae7d3f75c2bef"} Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.360646 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.539263 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chh94\" (UniqueName: \"kubernetes.io/projected/46514814-650b-4494-ae36-8a9806783944-kube-api-access-chh94\") pod \"46514814-650b-4494-ae36-8a9806783944\" (UID: \"46514814-650b-4494-ae36-8a9806783944\") " Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.539724 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/46514814-650b-4494-ae36-8a9806783944-node-mnt\") pod \"46514814-650b-4494-ae36-8a9806783944\" (UID: \"46514814-650b-4494-ae36-8a9806783944\") " Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.539779 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/46514814-650b-4494-ae36-8a9806783944-crc-storage\") pod \"46514814-650b-4494-ae36-8a9806783944\" (UID: \"46514814-650b-4494-ae36-8a9806783944\") " Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.539827 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46514814-650b-4494-ae36-8a9806783944-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "46514814-650b-4494-ae36-8a9806783944" (UID: "46514814-650b-4494-ae36-8a9806783944"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.540236 4941 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/46514814-650b-4494-ae36-8a9806783944-node-mnt\") on node \"crc\" DevicePath \"\"" Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.544991 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46514814-650b-4494-ae36-8a9806783944-kube-api-access-chh94" (OuterVolumeSpecName: "kube-api-access-chh94") pod "46514814-650b-4494-ae36-8a9806783944" (UID: "46514814-650b-4494-ae36-8a9806783944"). InnerVolumeSpecName "kube-api-access-chh94". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.556797 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46514814-650b-4494-ae36-8a9806783944-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "46514814-650b-4494-ae36-8a9806783944" (UID: "46514814-650b-4494-ae36-8a9806783944"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.641437 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chh94\" (UniqueName: \"kubernetes.io/projected/46514814-650b-4494-ae36-8a9806783944-kube-api-access-chh94\") on node \"crc\" DevicePath \"\"" Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.641760 4941 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/46514814-650b-4494-ae36-8a9806783944-crc-storage\") on node \"crc\" DevicePath \"\"" Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.994213 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-ptxsw" event={"ID":"46514814-650b-4494-ae36-8a9806783944","Type":"ContainerDied","Data":"65923c3a17098e241bca140e817121534bca1fcf17b954826c44fabf8025e076"} Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.994286 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65923c3a17098e241bca140e817121534bca1fcf17b954826c44fabf8025e076" Nov 30 07:53:42 crc kubenswrapper[4941]: I1130 07:53:42.994308 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-ptxsw" Nov 30 07:53:53 crc kubenswrapper[4941]: I1130 07:53:53.521905 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:53:53 crc kubenswrapper[4941]: E1130 07:53:53.523924 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:54:04 crc kubenswrapper[4941]: I1130 07:54:04.521657 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:54:04 crc kubenswrapper[4941]: E1130 07:54:04.522869 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:54:06 crc kubenswrapper[4941]: I1130 07:54:06.741785 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-88rtb"] Nov 30 07:54:06 crc kubenswrapper[4941]: E1130 07:54:06.742651 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46514814-650b-4494-ae36-8a9806783944" containerName="storage" Nov 30 07:54:06 crc kubenswrapper[4941]: I1130 07:54:06.742670 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="46514814-650b-4494-ae36-8a9806783944" containerName="storage" Nov 30 07:54:06 crc kubenswrapper[4941]: I1130 07:54:06.742858 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="46514814-650b-4494-ae36-8a9806783944" containerName="storage" Nov 30 07:54:06 crc kubenswrapper[4941]: I1130 07:54:06.744198 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:06 crc kubenswrapper[4941]: I1130 07:54:06.752726 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-88rtb"] Nov 30 07:54:06 crc kubenswrapper[4941]: I1130 07:54:06.924713 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac76051-04ee-44b9-9a65-a4e74f5e3d96-catalog-content\") pod \"redhat-operators-88rtb\" (UID: \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\") " pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:06 crc kubenswrapper[4941]: I1130 07:54:06.924781 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6cft\" (UniqueName: \"kubernetes.io/projected/bac76051-04ee-44b9-9a65-a4e74f5e3d96-kube-api-access-w6cft\") pod \"redhat-operators-88rtb\" (UID: \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\") " pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:06 crc kubenswrapper[4941]: I1130 07:54:06.924855 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac76051-04ee-44b9-9a65-a4e74f5e3d96-utilities\") pod \"redhat-operators-88rtb\" (UID: \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\") " pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:07 crc kubenswrapper[4941]: I1130 07:54:07.025748 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac76051-04ee-44b9-9a65-a4e74f5e3d96-utilities\") pod \"redhat-operators-88rtb\" (UID: \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\") " pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:07 crc kubenswrapper[4941]: I1130 07:54:07.025830 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac76051-04ee-44b9-9a65-a4e74f5e3d96-catalog-content\") pod \"redhat-operators-88rtb\" (UID: \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\") " pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:07 crc kubenswrapper[4941]: I1130 07:54:07.025897 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6cft\" (UniqueName: \"kubernetes.io/projected/bac76051-04ee-44b9-9a65-a4e74f5e3d96-kube-api-access-w6cft\") pod \"redhat-operators-88rtb\" (UID: \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\") " pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:07 crc kubenswrapper[4941]: I1130 07:54:07.026403 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac76051-04ee-44b9-9a65-a4e74f5e3d96-utilities\") pod \"redhat-operators-88rtb\" (UID: \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\") " pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:07 crc kubenswrapper[4941]: I1130 07:54:07.026767 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac76051-04ee-44b9-9a65-a4e74f5e3d96-catalog-content\") pod \"redhat-operators-88rtb\" (UID: \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\") " pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:07 crc kubenswrapper[4941]: I1130 07:54:07.192394 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-w6cft\" (UniqueName: \"kubernetes.io/projected/bac76051-04ee-44b9-9a65-a4e74f5e3d96-kube-api-access-w6cft\") pod \"redhat-operators-88rtb\" (UID: \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\") " pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:07 crc kubenswrapper[4941]: I1130 07:54:07.381816 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:07 crc kubenswrapper[4941]: I1130 07:54:07.892853 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-88rtb"] Nov 30 07:54:08 crc kubenswrapper[4941]: I1130 07:54:08.209606 4941 generic.go:334] "Generic (PLEG): container finished" podID="bac76051-04ee-44b9-9a65-a4e74f5e3d96" containerID="71f30fbe10a7be1050dffe6b147e2ecc18b32b0ed74b459b765d9bfb9c44cd30" exitCode=0 Nov 30 07:54:08 crc kubenswrapper[4941]: I1130 07:54:08.209703 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88rtb" event={"ID":"bac76051-04ee-44b9-9a65-a4e74f5e3d96","Type":"ContainerDied","Data":"71f30fbe10a7be1050dffe6b147e2ecc18b32b0ed74b459b765d9bfb9c44cd30"} Nov 30 07:54:08 crc kubenswrapper[4941]: I1130 07:54:08.209943 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88rtb" event={"ID":"bac76051-04ee-44b9-9a65-a4e74f5e3d96","Type":"ContainerStarted","Data":"ad3696b64e08d419f7d82a251095475907e1c3204e09cfeaf4f672c78203cdf8"} Nov 30 07:54:09 crc kubenswrapper[4941]: I1130 07:54:09.222894 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88rtb" event={"ID":"bac76051-04ee-44b9-9a65-a4e74f5e3d96","Type":"ContainerStarted","Data":"b9ef16902ba31401d9f37c9525269aa8c7405996b93198c53ebd2d77765d6557"} Nov 30 07:54:10 crc kubenswrapper[4941]: I1130 07:54:10.237551 4941 generic.go:334] "Generic (PLEG): container finished" podID="bac76051-04ee-44b9-9a65-a4e74f5e3d96" containerID="b9ef16902ba31401d9f37c9525269aa8c7405996b93198c53ebd2d77765d6557" exitCode=0 Nov 30 07:54:10 crc kubenswrapper[4941]: I1130 07:54:10.237598 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88rtb" event={"ID":"bac76051-04ee-44b9-9a65-a4e74f5e3d96","Type":"ContainerDied","Data":"b9ef16902ba31401d9f37c9525269aa8c7405996b93198c53ebd2d77765d6557"} Nov 30 07:54:11 crc kubenswrapper[4941]: I1130 07:54:11.248517 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88rtb" event={"ID":"bac76051-04ee-44b9-9a65-a4e74f5e3d96","Type":"ContainerStarted","Data":"d04d726c02211dd157a0611956227b1b237d45063bd48a6517e25ebd0a153637"} Nov 30 07:54:17 crc kubenswrapper[4941]: I1130 07:54:17.382226 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:17 crc kubenswrapper[4941]: I1130 07:54:17.383544 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:17 crc kubenswrapper[4941]: I1130 07:54:17.448098 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:17 crc kubenswrapper[4941]: I1130 07:54:17.479534 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-88rtb" podStartSLOduration=8.726660619 podStartE2EDuration="11.479508616s" 
podCreationTimestamp="2025-11-30 07:54:06 +0000 UTC" firstStartedPulling="2025-11-30 07:54:08.210862471 +0000 UTC m=+4068.979034080" lastFinishedPulling="2025-11-30 07:54:10.963710468 +0000 UTC m=+4071.731882077" observedRunningTime="2025-11-30 07:54:11.273387647 +0000 UTC m=+4072.041559256" watchObservedRunningTime="2025-11-30 07:54:17.479508616 +0000 UTC m=+4078.247680235" Nov 30 07:54:18 crc kubenswrapper[4941]: I1130 07:54:18.368648 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:18 crc kubenswrapper[4941]: I1130 07:54:18.424741 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-88rtb"] Nov 30 07:54:18 crc kubenswrapper[4941]: I1130 07:54:18.522678 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:54:18 crc kubenswrapper[4941]: E1130 07:54:18.523290 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:54:20 crc kubenswrapper[4941]: I1130 07:54:20.320066 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-88rtb" podUID="bac76051-04ee-44b9-9a65-a4e74f5e3d96" containerName="registry-server" containerID="cri-o://d04d726c02211dd157a0611956227b1b237d45063bd48a6517e25ebd0a153637" gracePeriod=2 Nov 30 07:54:22 crc kubenswrapper[4941]: I1130 07:54:22.347127 4941 generic.go:334] "Generic (PLEG): container finished" podID="bac76051-04ee-44b9-9a65-a4e74f5e3d96" containerID="d04d726c02211dd157a0611956227b1b237d45063bd48a6517e25ebd0a153637" exitCode=0 Nov 30 07:54:22 crc kubenswrapper[4941]: I1130 07:54:22.347468 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88rtb" event={"ID":"bac76051-04ee-44b9-9a65-a4e74f5e3d96","Type":"ContainerDied","Data":"d04d726c02211dd157a0611956227b1b237d45063bd48a6517e25ebd0a153637"} Nov 30 07:54:22 crc kubenswrapper[4941]: I1130 07:54:22.604846 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:22 crc kubenswrapper[4941]: I1130 07:54:22.799911 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac76051-04ee-44b9-9a65-a4e74f5e3d96-utilities\") pod \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\" (UID: \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\") " Nov 30 07:54:22 crc kubenswrapper[4941]: I1130 07:54:22.800106 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6cft\" (UniqueName: \"kubernetes.io/projected/bac76051-04ee-44b9-9a65-a4e74f5e3d96-kube-api-access-w6cft\") pod \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\" (UID: \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\") " Nov 30 07:54:22 crc kubenswrapper[4941]: I1130 07:54:22.800146 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac76051-04ee-44b9-9a65-a4e74f5e3d96-catalog-content\") pod \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\" (UID: \"bac76051-04ee-44b9-9a65-a4e74f5e3d96\") " Nov 30 07:54:22 crc kubenswrapper[4941]: I1130 07:54:22.801313 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bac76051-04ee-44b9-9a65-a4e74f5e3d96-utilities" (OuterVolumeSpecName: "utilities") pod "bac76051-04ee-44b9-9a65-a4e74f5e3d96" (UID: "bac76051-04ee-44b9-9a65-a4e74f5e3d96"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:54:22 crc kubenswrapper[4941]: I1130 07:54:22.805992 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bac76051-04ee-44b9-9a65-a4e74f5e3d96-kube-api-access-w6cft" (OuterVolumeSpecName: "kube-api-access-w6cft") pod "bac76051-04ee-44b9-9a65-a4e74f5e3d96" (UID: "bac76051-04ee-44b9-9a65-a4e74f5e3d96"). InnerVolumeSpecName "kube-api-access-w6cft". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:54:22 crc kubenswrapper[4941]: I1130 07:54:22.901344 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac76051-04ee-44b9-9a65-a4e74f5e3d96-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:54:22 crc kubenswrapper[4941]: I1130 07:54:22.901378 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6cft\" (UniqueName: \"kubernetes.io/projected/bac76051-04ee-44b9-9a65-a4e74f5e3d96-kube-api-access-w6cft\") on node \"crc\" DevicePath \"\"" Nov 30 07:54:22 crc kubenswrapper[4941]: I1130 07:54:22.915280 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bac76051-04ee-44b9-9a65-a4e74f5e3d96-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bac76051-04ee-44b9-9a65-a4e74f5e3d96" (UID: "bac76051-04ee-44b9-9a65-a4e74f5e3d96"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:54:23 crc kubenswrapper[4941]: I1130 07:54:23.003268 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac76051-04ee-44b9-9a65-a4e74f5e3d96-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:54:23 crc kubenswrapper[4941]: I1130 07:54:23.357386 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88rtb" event={"ID":"bac76051-04ee-44b9-9a65-a4e74f5e3d96","Type":"ContainerDied","Data":"ad3696b64e08d419f7d82a251095475907e1c3204e09cfeaf4f672c78203cdf8"} Nov 30 07:54:23 crc kubenswrapper[4941]: I1130 07:54:23.357472 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-88rtb" Nov 30 07:54:23 crc kubenswrapper[4941]: I1130 07:54:23.357845 4941 scope.go:117] "RemoveContainer" containerID="d04d726c02211dd157a0611956227b1b237d45063bd48a6517e25ebd0a153637" Nov 30 07:54:23 crc kubenswrapper[4941]: I1130 07:54:23.393157 4941 scope.go:117] "RemoveContainer" containerID="b9ef16902ba31401d9f37c9525269aa8c7405996b93198c53ebd2d77765d6557" Nov 30 07:54:23 crc kubenswrapper[4941]: I1130 07:54:23.394873 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-88rtb"] Nov 30 07:54:23 crc kubenswrapper[4941]: I1130 07:54:23.402619 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-88rtb"] Nov 30 07:54:23 crc kubenswrapper[4941]: I1130 07:54:23.430532 4941 scope.go:117] "RemoveContainer" containerID="71f30fbe10a7be1050dffe6b147e2ecc18b32b0ed74b459b765d9bfb9c44cd30" Nov 30 07:54:23 crc kubenswrapper[4941]: I1130 07:54:23.534602 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bac76051-04ee-44b9-9a65-a4e74f5e3d96" path="/var/lib/kubelet/pods/bac76051-04ee-44b9-9a65-a4e74f5e3d96/volumes" Nov 30 07:54:24 crc kubenswrapper[4941]: I1130 07:54:24.871946 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lvzgk"] Nov 30 07:54:24 crc kubenswrapper[4941]: E1130 07:54:24.873694 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac76051-04ee-44b9-9a65-a4e74f5e3d96" containerName="extract-content" Nov 30 07:54:24 crc kubenswrapper[4941]: I1130 07:54:24.873734 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac76051-04ee-44b9-9a65-a4e74f5e3d96" containerName="extract-content" Nov 30 07:54:24 crc kubenswrapper[4941]: E1130 07:54:24.873779 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac76051-04ee-44b9-9a65-a4e74f5e3d96" containerName="extract-utilities" Nov 30 07:54:24 crc kubenswrapper[4941]: I1130 07:54:24.873798 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac76051-04ee-44b9-9a65-a4e74f5e3d96" containerName="extract-utilities" Nov 30 07:54:24 crc kubenswrapper[4941]: E1130 07:54:24.873839 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac76051-04ee-44b9-9a65-a4e74f5e3d96" containerName="registry-server" Nov 30 07:54:24 crc kubenswrapper[4941]: I1130 07:54:24.873857 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac76051-04ee-44b9-9a65-a4e74f5e3d96" containerName="registry-server" Nov 30 07:54:24 crc kubenswrapper[4941]: I1130 07:54:24.874237 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="bac76051-04ee-44b9-9a65-a4e74f5e3d96" containerName="registry-server" Nov 30 07:54:24 crc 
kubenswrapper[4941]: I1130 07:54:24.877139 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:24 crc kubenswrapper[4941]: I1130 07:54:24.887977 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvzgk"] Nov 30 07:54:25 crc kubenswrapper[4941]: I1130 07:54:25.037893 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fef96060-1846-424a-b236-0b1458c07022-utilities\") pod \"redhat-marketplace-lvzgk\" (UID: \"fef96060-1846-424a-b236-0b1458c07022\") " pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:25 crc kubenswrapper[4941]: I1130 07:54:25.038568 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fef96060-1846-424a-b236-0b1458c07022-catalog-content\") pod \"redhat-marketplace-lvzgk\" (UID: \"fef96060-1846-424a-b236-0b1458c07022\") " pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:25 crc kubenswrapper[4941]: I1130 07:54:25.038782 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g77l5\" (UniqueName: \"kubernetes.io/projected/fef96060-1846-424a-b236-0b1458c07022-kube-api-access-g77l5\") pod \"redhat-marketplace-lvzgk\" (UID: \"fef96060-1846-424a-b236-0b1458c07022\") " pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:25 crc kubenswrapper[4941]: I1130 07:54:25.139675 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fef96060-1846-424a-b236-0b1458c07022-utilities\") pod \"redhat-marketplace-lvzgk\" (UID: \"fef96060-1846-424a-b236-0b1458c07022\") " pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:25 crc kubenswrapper[4941]: I1130 07:54:25.139781 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fef96060-1846-424a-b236-0b1458c07022-catalog-content\") pod \"redhat-marketplace-lvzgk\" (UID: \"fef96060-1846-424a-b236-0b1458c07022\") " pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:25 crc kubenswrapper[4941]: I1130 07:54:25.139858 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g77l5\" (UniqueName: \"kubernetes.io/projected/fef96060-1846-424a-b236-0b1458c07022-kube-api-access-g77l5\") pod \"redhat-marketplace-lvzgk\" (UID: \"fef96060-1846-424a-b236-0b1458c07022\") " pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:25 crc kubenswrapper[4941]: I1130 07:54:25.140532 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fef96060-1846-424a-b236-0b1458c07022-utilities\") pod \"redhat-marketplace-lvzgk\" (UID: \"fef96060-1846-424a-b236-0b1458c07022\") " pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:25 crc kubenswrapper[4941]: I1130 07:54:25.140684 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fef96060-1846-424a-b236-0b1458c07022-catalog-content\") pod \"redhat-marketplace-lvzgk\" (UID: \"fef96060-1846-424a-b236-0b1458c07022\") " pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:25 crc 
kubenswrapper[4941]: I1130 07:54:25.180222 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g77l5\" (UniqueName: \"kubernetes.io/projected/fef96060-1846-424a-b236-0b1458c07022-kube-api-access-g77l5\") pod \"redhat-marketplace-lvzgk\" (UID: \"fef96060-1846-424a-b236-0b1458c07022\") " pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:25 crc kubenswrapper[4941]: I1130 07:54:25.250279 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:25 crc kubenswrapper[4941]: I1130 07:54:25.491876 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvzgk"] Nov 30 07:54:25 crc kubenswrapper[4941]: W1130 07:54:25.501764 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfef96060_1846_424a_b236_0b1458c07022.slice/crio-e3adbb8d70e9e24e89926b641f69287bf546955cd4bf5747422e268b57840ace WatchSource:0}: Error finding container e3adbb8d70e9e24e89926b641f69287bf546955cd4bf5747422e268b57840ace: Status 404 returned error can't find the container with id e3adbb8d70e9e24e89926b641f69287bf546955cd4bf5747422e268b57840ace Nov 30 07:54:26 crc kubenswrapper[4941]: I1130 07:54:26.397575 4941 generic.go:334] "Generic (PLEG): container finished" podID="fef96060-1846-424a-b236-0b1458c07022" containerID="2ebe684723b8863df9e7727a18a7ac74078354b7c458b4c4542b0e0f7fdfb035" exitCode=0 Nov 30 07:54:26 crc kubenswrapper[4941]: I1130 07:54:26.397677 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvzgk" event={"ID":"fef96060-1846-424a-b236-0b1458c07022","Type":"ContainerDied","Data":"2ebe684723b8863df9e7727a18a7ac74078354b7c458b4c4542b0e0f7fdfb035"} Nov 30 07:54:26 crc kubenswrapper[4941]: I1130 07:54:26.397774 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvzgk" event={"ID":"fef96060-1846-424a-b236-0b1458c07022","Type":"ContainerStarted","Data":"e3adbb8d70e9e24e89926b641f69287bf546955cd4bf5747422e268b57840ace"} Nov 30 07:54:27 crc kubenswrapper[4941]: I1130 07:54:27.408804 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvzgk" event={"ID":"fef96060-1846-424a-b236-0b1458c07022","Type":"ContainerStarted","Data":"a07702c601879a0650091d30e972d3a6c2bf1d3ddff6b3d656832590f4f02611"} Nov 30 07:54:28 crc kubenswrapper[4941]: I1130 07:54:28.420130 4941 generic.go:334] "Generic (PLEG): container finished" podID="fef96060-1846-424a-b236-0b1458c07022" containerID="a07702c601879a0650091d30e972d3a6c2bf1d3ddff6b3d656832590f4f02611" exitCode=0 Nov 30 07:54:28 crc kubenswrapper[4941]: I1130 07:54:28.420213 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvzgk" event={"ID":"fef96060-1846-424a-b236-0b1458c07022","Type":"ContainerDied","Data":"a07702c601879a0650091d30e972d3a6c2bf1d3ddff6b3d656832590f4f02611"} Nov 30 07:54:29 crc kubenswrapper[4941]: I1130 07:54:29.436069 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvzgk" event={"ID":"fef96060-1846-424a-b236-0b1458c07022","Type":"ContainerStarted","Data":"90e43243d1d69406961db5a7733baeabc43d9dd66be945738bab740f93ee49a9"} Nov 30 07:54:29 crc kubenswrapper[4941]: I1130 07:54:29.469724 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-lvzgk" podStartSLOduration=2.967028865 podStartE2EDuration="5.469706891s" podCreationTimestamp="2025-11-30 07:54:24 +0000 UTC" firstStartedPulling="2025-11-30 07:54:26.401974274 +0000 UTC m=+4087.170145883" lastFinishedPulling="2025-11-30 07:54:28.90465229 +0000 UTC m=+4089.672823909" observedRunningTime="2025-11-30 07:54:29.466776801 +0000 UTC m=+4090.234948430" watchObservedRunningTime="2025-11-30 07:54:29.469706891 +0000 UTC m=+4090.237878500" Nov 30 07:54:32 crc kubenswrapper[4941]: I1130 07:54:32.522123 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:54:32 crc kubenswrapper[4941]: E1130 07:54:32.522930 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:54:35 crc kubenswrapper[4941]: I1130 07:54:35.251000 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:35 crc kubenswrapper[4941]: I1130 07:54:35.251451 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:35 crc kubenswrapper[4941]: I1130 07:54:35.351107 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:35 crc kubenswrapper[4941]: I1130 07:54:35.550679 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:35 crc kubenswrapper[4941]: I1130 07:54:35.611868 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvzgk"] Nov 30 07:54:37 crc kubenswrapper[4941]: I1130 07:54:37.503928 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lvzgk" podUID="fef96060-1846-424a-b236-0b1458c07022" containerName="registry-server" containerID="cri-o://90e43243d1d69406961db5a7733baeabc43d9dd66be945738bab740f93ee49a9" gracePeriod=2 Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.535897 4941 generic.go:334] "Generic (PLEG): container finished" podID="fef96060-1846-424a-b236-0b1458c07022" containerID="90e43243d1d69406961db5a7733baeabc43d9dd66be945738bab740f93ee49a9" exitCode=0 Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.536364 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvzgk" event={"ID":"fef96060-1846-424a-b236-0b1458c07022","Type":"ContainerDied","Data":"90e43243d1d69406961db5a7733baeabc43d9dd66be945738bab740f93ee49a9"} Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.536411 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lvzgk" event={"ID":"fef96060-1846-424a-b236-0b1458c07022","Type":"ContainerDied","Data":"e3adbb8d70e9e24e89926b641f69287bf546955cd4bf5747422e268b57840ace"} Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.536433 4941 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="e3adbb8d70e9e24e89926b641f69287bf546955cd4bf5747422e268b57840ace" Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.567974 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.680205 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fef96060-1846-424a-b236-0b1458c07022-catalog-content\") pod \"fef96060-1846-424a-b236-0b1458c07022\" (UID: \"fef96060-1846-424a-b236-0b1458c07022\") " Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.680372 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fef96060-1846-424a-b236-0b1458c07022-utilities\") pod \"fef96060-1846-424a-b236-0b1458c07022\" (UID: \"fef96060-1846-424a-b236-0b1458c07022\") " Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.680406 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g77l5\" (UniqueName: \"kubernetes.io/projected/fef96060-1846-424a-b236-0b1458c07022-kube-api-access-g77l5\") pod \"fef96060-1846-424a-b236-0b1458c07022\" (UID: \"fef96060-1846-424a-b236-0b1458c07022\") " Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.692845 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fef96060-1846-424a-b236-0b1458c07022-utilities" (OuterVolumeSpecName: "utilities") pod "fef96060-1846-424a-b236-0b1458c07022" (UID: "fef96060-1846-424a-b236-0b1458c07022"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.701750 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fef96060-1846-424a-b236-0b1458c07022-kube-api-access-g77l5" (OuterVolumeSpecName: "kube-api-access-g77l5") pod "fef96060-1846-424a-b236-0b1458c07022" (UID: "fef96060-1846-424a-b236-0b1458c07022"). InnerVolumeSpecName "kube-api-access-g77l5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.707639 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fef96060-1846-424a-b236-0b1458c07022-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fef96060-1846-424a-b236-0b1458c07022" (UID: "fef96060-1846-424a-b236-0b1458c07022"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.782587 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fef96060-1846-424a-b236-0b1458c07022-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.782783 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g77l5\" (UniqueName: \"kubernetes.io/projected/fef96060-1846-424a-b236-0b1458c07022-kube-api-access-g77l5\") on node \"crc\" DevicePath \"\"" Nov 30 07:54:38 crc kubenswrapper[4941]: I1130 07:54:38.782814 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fef96060-1846-424a-b236-0b1458c07022-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:54:39 crc kubenswrapper[4941]: I1130 07:54:39.543900 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lvzgk" Nov 30 07:54:39 crc kubenswrapper[4941]: I1130 07:54:39.595124 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvzgk"] Nov 30 07:54:39 crc kubenswrapper[4941]: I1130 07:54:39.610077 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lvzgk"] Nov 30 07:54:41 crc kubenswrapper[4941]: I1130 07:54:41.531555 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fef96060-1846-424a-b236-0b1458c07022" path="/var/lib/kubelet/pods/fef96060-1846-424a-b236-0b1458c07022/volumes" Nov 30 07:54:43 crc kubenswrapper[4941]: I1130 07:54:43.522175 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:54:43 crc kubenswrapper[4941]: E1130 07:54:43.522492 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:54:54 crc kubenswrapper[4941]: I1130 07:54:54.522621 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:54:54 crc kubenswrapper[4941]: E1130 07:54:54.524265 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:55:06 crc kubenswrapper[4941]: I1130 07:55:06.521053 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:55:06 crc kubenswrapper[4941]: E1130 07:55:06.522070 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:55:17 crc kubenswrapper[4941]: I1130 07:55:17.521111 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:55:17 crc kubenswrapper[4941]: E1130 07:55:17.521862 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:55:30 crc kubenswrapper[4941]: I1130 07:55:30.521994 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:55:30 crc kubenswrapper[4941]: E1130 07:55:30.523078 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 07:55:45 crc kubenswrapper[4941]: I1130 07:55:45.522025 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7" Nov 30 07:55:46 crc kubenswrapper[4941]: I1130 07:55:46.156795 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"355a16a1b52abc6a36991155e064942cfefe64c5bf70d9cbe30367f4a84ce847"} Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.811120 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v72fk"] Nov 30 07:55:53 crc kubenswrapper[4941]: E1130 07:55:53.822046 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fef96060-1846-424a-b236-0b1458c07022" containerName="extract-utilities" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.822094 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fef96060-1846-424a-b236-0b1458c07022" containerName="extract-utilities" Nov 30 07:55:53 crc kubenswrapper[4941]: E1130 07:55:53.822154 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fef96060-1846-424a-b236-0b1458c07022" containerName="extract-content" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.822167 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fef96060-1846-424a-b236-0b1458c07022" containerName="extract-content" Nov 30 07:55:53 crc kubenswrapper[4941]: E1130 07:55:53.822201 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fef96060-1846-424a-b236-0b1458c07022" containerName="registry-server" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.822215 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fef96060-1846-424a-b236-0b1458c07022" containerName="registry-server" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.822538 4941 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="fef96060-1846-424a-b236-0b1458c07022" containerName="registry-server" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.824431 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.824993 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v72fk"] Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.867089 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-262cq\" (UniqueName: \"kubernetes.io/projected/eb18ac9b-3f0d-4537-8399-4cebe341d22a-kube-api-access-262cq\") pod \"community-operators-v72fk\" (UID: \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\") " pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.867204 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb18ac9b-3f0d-4537-8399-4cebe341d22a-utilities\") pod \"community-operators-v72fk\" (UID: \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\") " pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.867488 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb18ac9b-3f0d-4537-8399-4cebe341d22a-catalog-content\") pod \"community-operators-v72fk\" (UID: \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\") " pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.969539 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-262cq\" (UniqueName: \"kubernetes.io/projected/eb18ac9b-3f0d-4537-8399-4cebe341d22a-kube-api-access-262cq\") pod \"community-operators-v72fk\" (UID: \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\") " pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.969608 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb18ac9b-3f0d-4537-8399-4cebe341d22a-utilities\") pod \"community-operators-v72fk\" (UID: \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\") " pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.969761 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb18ac9b-3f0d-4537-8399-4cebe341d22a-catalog-content\") pod \"community-operators-v72fk\" (UID: \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\") " pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.970152 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb18ac9b-3f0d-4537-8399-4cebe341d22a-utilities\") pod \"community-operators-v72fk\" (UID: \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\") " pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.970210 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb18ac9b-3f0d-4537-8399-4cebe341d22a-catalog-content\") pod 
\"community-operators-v72fk\" (UID: \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\") " pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:55:53 crc kubenswrapper[4941]: I1130 07:55:53.991839 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-262cq\" (UniqueName: \"kubernetes.io/projected/eb18ac9b-3f0d-4537-8399-4cebe341d22a-kube-api-access-262cq\") pod \"community-operators-v72fk\" (UID: \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\") " pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:55:54 crc kubenswrapper[4941]: I1130 07:55:54.167634 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:55:54 crc kubenswrapper[4941]: I1130 07:55:54.654906 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v72fk"] Nov 30 07:55:55 crc kubenswrapper[4941]: I1130 07:55:55.232703 4941 generic.go:334] "Generic (PLEG): container finished" podID="eb18ac9b-3f0d-4537-8399-4cebe341d22a" containerID="5b4a1c0091eb16729f20350c9f7d0f6880dd4948c942deac182fa47e688f9e11" exitCode=0 Nov 30 07:55:55 crc kubenswrapper[4941]: I1130 07:55:55.232834 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v72fk" event={"ID":"eb18ac9b-3f0d-4537-8399-4cebe341d22a","Type":"ContainerDied","Data":"5b4a1c0091eb16729f20350c9f7d0f6880dd4948c942deac182fa47e688f9e11"} Nov 30 07:55:55 crc kubenswrapper[4941]: I1130 07:55:55.233244 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v72fk" event={"ID":"eb18ac9b-3f0d-4537-8399-4cebe341d22a","Type":"ContainerStarted","Data":"e023a070c3bebc831af3840eb0828b1d405aff362c8c6e3e17daecf8e65bb6b9"} Nov 30 07:55:55 crc kubenswrapper[4941]: I1130 07:55:55.236198 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 07:55:56 crc kubenswrapper[4941]: I1130 07:55:56.241492 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v72fk" event={"ID":"eb18ac9b-3f0d-4537-8399-4cebe341d22a","Type":"ContainerStarted","Data":"0af5620ea6a9ceaca3f3f11d1238668794953585af851eaf5888ef3c2b63f407"} Nov 30 07:55:57 crc kubenswrapper[4941]: I1130 07:55:57.252129 4941 generic.go:334] "Generic (PLEG): container finished" podID="eb18ac9b-3f0d-4537-8399-4cebe341d22a" containerID="0af5620ea6a9ceaca3f3f11d1238668794953585af851eaf5888ef3c2b63f407" exitCode=0 Nov 30 07:55:57 crc kubenswrapper[4941]: I1130 07:55:57.252180 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v72fk" event={"ID":"eb18ac9b-3f0d-4537-8399-4cebe341d22a","Type":"ContainerDied","Data":"0af5620ea6a9ceaca3f3f11d1238668794953585af851eaf5888ef3c2b63f407"} Nov 30 07:55:58 crc kubenswrapper[4941]: I1130 07:55:58.268027 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v72fk" event={"ID":"eb18ac9b-3f0d-4537-8399-4cebe341d22a","Type":"ContainerStarted","Data":"9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21"} Nov 30 07:55:58 crc kubenswrapper[4941]: I1130 07:55:58.314022 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v72fk" podStartSLOduration=2.705290744 podStartE2EDuration="5.313991889s" podCreationTimestamp="2025-11-30 07:55:53 +0000 UTC" firstStartedPulling="2025-11-30 07:55:55.235761974 
+0000 UTC m=+4176.003933623" lastFinishedPulling="2025-11-30 07:55:57.844463149 +0000 UTC m=+4178.612634768" observedRunningTime="2025-11-30 07:55:58.297508389 +0000 UTC m=+4179.065680088" watchObservedRunningTime="2025-11-30 07:55:58.313991889 +0000 UTC m=+4179.082163538" Nov 30 07:56:04 crc kubenswrapper[4941]: I1130 07:56:04.168118 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:56:04 crc kubenswrapper[4941]: I1130 07:56:04.168267 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:56:04 crc kubenswrapper[4941]: I1130 07:56:04.218029 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:56:04 crc kubenswrapper[4941]: I1130 07:56:04.362498 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:56:06 crc kubenswrapper[4941]: I1130 07:56:06.584594 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v72fk"] Nov 30 07:56:07 crc kubenswrapper[4941]: I1130 07:56:07.350198 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v72fk" podUID="eb18ac9b-3f0d-4537-8399-4cebe341d22a" containerName="registry-server" containerID="cri-o://9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21" gracePeriod=2 Nov 30 07:56:07 crc kubenswrapper[4941]: I1130 07:56:07.820513 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.019489 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb18ac9b-3f0d-4537-8399-4cebe341d22a-utilities\") pod \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\" (UID: \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\") " Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.019599 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb18ac9b-3f0d-4537-8399-4cebe341d22a-catalog-content\") pod \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\" (UID: \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\") " Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.019692 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-262cq\" (UniqueName: \"kubernetes.io/projected/eb18ac9b-3f0d-4537-8399-4cebe341d22a-kube-api-access-262cq\") pod \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\" (UID: \"eb18ac9b-3f0d-4537-8399-4cebe341d22a\") " Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.020541 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb18ac9b-3f0d-4537-8399-4cebe341d22a-utilities" (OuterVolumeSpecName: "utilities") pod "eb18ac9b-3f0d-4537-8399-4cebe341d22a" (UID: "eb18ac9b-3f0d-4537-8399-4cebe341d22a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.031977 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb18ac9b-3f0d-4537-8399-4cebe341d22a-kube-api-access-262cq" (OuterVolumeSpecName: "kube-api-access-262cq") pod "eb18ac9b-3f0d-4537-8399-4cebe341d22a" (UID: "eb18ac9b-3f0d-4537-8399-4cebe341d22a"). InnerVolumeSpecName "kube-api-access-262cq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.105553 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb18ac9b-3f0d-4537-8399-4cebe341d22a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb18ac9b-3f0d-4537-8399-4cebe341d22a" (UID: "eb18ac9b-3f0d-4537-8399-4cebe341d22a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.121302 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb18ac9b-3f0d-4537-8399-4cebe341d22a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.121362 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-262cq\" (UniqueName: \"kubernetes.io/projected/eb18ac9b-3f0d-4537-8399-4cebe341d22a-kube-api-access-262cq\") on node \"crc\" DevicePath \"\"" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.121381 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb18ac9b-3f0d-4537-8399-4cebe341d22a-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.380966 4941 generic.go:334] "Generic (PLEG): container finished" podID="eb18ac9b-3f0d-4537-8399-4cebe341d22a" containerID="9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21" exitCode=0 Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.381013 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v72fk" event={"ID":"eb18ac9b-3f0d-4537-8399-4cebe341d22a","Type":"ContainerDied","Data":"9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21"} Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.381044 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v72fk" event={"ID":"eb18ac9b-3f0d-4537-8399-4cebe341d22a","Type":"ContainerDied","Data":"e023a070c3bebc831af3840eb0828b1d405aff362c8c6e3e17daecf8e65bb6b9"} Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.381069 4941 scope.go:117] "RemoveContainer" containerID="9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.381207 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v72fk" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.421381 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v72fk"] Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.421817 4941 scope.go:117] "RemoveContainer" containerID="0af5620ea6a9ceaca3f3f11d1238668794953585af851eaf5888ef3c2b63f407" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.431533 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v72fk"] Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.449067 4941 scope.go:117] "RemoveContainer" containerID="5b4a1c0091eb16729f20350c9f7d0f6880dd4948c942deac182fa47e688f9e11" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.485353 4941 scope.go:117] "RemoveContainer" containerID="9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21" Nov 30 07:56:08 crc kubenswrapper[4941]: E1130 07:56:08.485898 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21\": container with ID starting with 9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21 not found: ID does not exist" containerID="9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.485952 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21"} err="failed to get container status \"9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21\": rpc error: code = NotFound desc = could not find container \"9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21\": container with ID starting with 9050974eeb27479077b09400461a3030f62fa3cfca59d1ecad7e278092a04c21 not found: ID does not exist" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.485987 4941 scope.go:117] "RemoveContainer" containerID="0af5620ea6a9ceaca3f3f11d1238668794953585af851eaf5888ef3c2b63f407" Nov 30 07:56:08 crc kubenswrapper[4941]: E1130 07:56:08.486405 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0af5620ea6a9ceaca3f3f11d1238668794953585af851eaf5888ef3c2b63f407\": container with ID starting with 0af5620ea6a9ceaca3f3f11d1238668794953585af851eaf5888ef3c2b63f407 not found: ID does not exist" containerID="0af5620ea6a9ceaca3f3f11d1238668794953585af851eaf5888ef3c2b63f407" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.486464 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0af5620ea6a9ceaca3f3f11d1238668794953585af851eaf5888ef3c2b63f407"} err="failed to get container status \"0af5620ea6a9ceaca3f3f11d1238668794953585af851eaf5888ef3c2b63f407\": rpc error: code = NotFound desc = could not find container \"0af5620ea6a9ceaca3f3f11d1238668794953585af851eaf5888ef3c2b63f407\": container with ID starting with 0af5620ea6a9ceaca3f3f11d1238668794953585af851eaf5888ef3c2b63f407 not found: ID does not exist" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.486489 4941 scope.go:117] "RemoveContainer" containerID="5b4a1c0091eb16729f20350c9f7d0f6880dd4948c942deac182fa47e688f9e11" Nov 30 07:56:08 crc kubenswrapper[4941]: E1130 07:56:08.486904 4941 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5b4a1c0091eb16729f20350c9f7d0f6880dd4948c942deac182fa47e688f9e11\": container with ID starting with 5b4a1c0091eb16729f20350c9f7d0f6880dd4948c942deac182fa47e688f9e11 not found: ID does not exist" containerID="5b4a1c0091eb16729f20350c9f7d0f6880dd4948c942deac182fa47e688f9e11" Nov 30 07:56:08 crc kubenswrapper[4941]: I1130 07:56:08.486938 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b4a1c0091eb16729f20350c9f7d0f6880dd4948c942deac182fa47e688f9e11"} err="failed to get container status \"5b4a1c0091eb16729f20350c9f7d0f6880dd4948c942deac182fa47e688f9e11\": rpc error: code = NotFound desc = could not find container \"5b4a1c0091eb16729f20350c9f7d0f6880dd4948c942deac182fa47e688f9e11\": container with ID starting with 5b4a1c0091eb16729f20350c9f7d0f6880dd4948c942deac182fa47e688f9e11 not found: ID does not exist" Nov 30 07:56:09 crc kubenswrapper[4941]: I1130 07:56:09.540048 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb18ac9b-3f0d-4537-8399-4cebe341d22a" path="/var/lib/kubelet/pods/eb18ac9b-3f0d-4537-8399-4cebe341d22a/volumes" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.476254 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6ffff64c9-xckjd"] Nov 30 07:56:54 crc kubenswrapper[4941]: E1130 07:56:54.477435 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb18ac9b-3f0d-4537-8399-4cebe341d22a" containerName="extract-utilities" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.477448 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb18ac9b-3f0d-4537-8399-4cebe341d22a" containerName="extract-utilities" Nov 30 07:56:54 crc kubenswrapper[4941]: E1130 07:56:54.477468 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb18ac9b-3f0d-4537-8399-4cebe341d22a" containerName="registry-server" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.477474 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb18ac9b-3f0d-4537-8399-4cebe341d22a" containerName="registry-server" Nov 30 07:56:54 crc kubenswrapper[4941]: E1130 07:56:54.477493 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb18ac9b-3f0d-4537-8399-4cebe341d22a" containerName="extract-content" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.477499 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb18ac9b-3f0d-4537-8399-4cebe341d22a" containerName="extract-content" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.477651 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb18ac9b-3f0d-4537-8399-4cebe341d22a" containerName="registry-server" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.478768 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.484974 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.485549 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.485944 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-mb242" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.486128 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.486197 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.502458 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ffff64c9-xckjd"] Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.608696 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ds4dg\" (UniqueName: \"kubernetes.io/projected/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-kube-api-access-ds4dg\") pod \"dnsmasq-dns-6ffff64c9-xckjd\" (UID: \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\") " pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.609521 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-dns-svc\") pod \"dnsmasq-dns-6ffff64c9-xckjd\" (UID: \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\") " pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.609661 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-config\") pod \"dnsmasq-dns-6ffff64c9-xckjd\" (UID: \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\") " pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.711977 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ds4dg\" (UniqueName: \"kubernetes.io/projected/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-kube-api-access-ds4dg\") pod \"dnsmasq-dns-6ffff64c9-xckjd\" (UID: \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\") " pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.712495 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-dns-svc\") pod \"dnsmasq-dns-6ffff64c9-xckjd\" (UID: \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\") " pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.712716 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-config\") pod \"dnsmasq-dns-6ffff64c9-xckjd\" (UID: \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\") " pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.713870 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-dns-svc\") pod \"dnsmasq-dns-6ffff64c9-xckjd\" (UID: \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\") " pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.714009 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-config\") pod \"dnsmasq-dns-6ffff64c9-xckjd\" (UID: \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\") " pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.735634 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ds4dg\" (UniqueName: \"kubernetes.io/projected/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-kube-api-access-ds4dg\") pod \"dnsmasq-dns-6ffff64c9-xckjd\" (UID: \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\") " pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.753772 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bfb4449cc-mrwdv"] Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.755266 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.768315 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bfb4449cc-mrwdv"] Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.846582 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.919611 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g25b\" (UniqueName: \"kubernetes.io/projected/bc0c81bd-9a60-4038-b43f-3beef81d5600-kube-api-access-6g25b\") pod \"dnsmasq-dns-7bfb4449cc-mrwdv\" (UID: \"bc0c81bd-9a60-4038-b43f-3beef81d5600\") " pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.919682 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc0c81bd-9a60-4038-b43f-3beef81d5600-config\") pod \"dnsmasq-dns-7bfb4449cc-mrwdv\" (UID: \"bc0c81bd-9a60-4038-b43f-3beef81d5600\") " pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" Nov 30 07:56:54 crc kubenswrapper[4941]: I1130 07:56:54.919754 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc0c81bd-9a60-4038-b43f-3beef81d5600-dns-svc\") pod \"dnsmasq-dns-7bfb4449cc-mrwdv\" (UID: \"bc0c81bd-9a60-4038-b43f-3beef81d5600\") " pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.025197 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g25b\" (UniqueName: \"kubernetes.io/projected/bc0c81bd-9a60-4038-b43f-3beef81d5600-kube-api-access-6g25b\") pod \"dnsmasq-dns-7bfb4449cc-mrwdv\" (UID: \"bc0c81bd-9a60-4038-b43f-3beef81d5600\") " pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.025789 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc0c81bd-9a60-4038-b43f-3beef81d5600-config\") pod \"dnsmasq-dns-7bfb4449cc-mrwdv\" (UID: 
\"bc0c81bd-9a60-4038-b43f-3beef81d5600\") " pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.026050 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc0c81bd-9a60-4038-b43f-3beef81d5600-dns-svc\") pod \"dnsmasq-dns-7bfb4449cc-mrwdv\" (UID: \"bc0c81bd-9a60-4038-b43f-3beef81d5600\") " pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.027699 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc0c81bd-9a60-4038-b43f-3beef81d5600-config\") pod \"dnsmasq-dns-7bfb4449cc-mrwdv\" (UID: \"bc0c81bd-9a60-4038-b43f-3beef81d5600\") " pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.028708 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc0c81bd-9a60-4038-b43f-3beef81d5600-dns-svc\") pod \"dnsmasq-dns-7bfb4449cc-mrwdv\" (UID: \"bc0c81bd-9a60-4038-b43f-3beef81d5600\") " pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.065277 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g25b\" (UniqueName: \"kubernetes.io/projected/bc0c81bd-9a60-4038-b43f-3beef81d5600-kube-api-access-6g25b\") pod \"dnsmasq-dns-7bfb4449cc-mrwdv\" (UID: \"bc0c81bd-9a60-4038-b43f-3beef81d5600\") " pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.077839 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.371084 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ffff64c9-xckjd"] Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.563354 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bfb4449cc-mrwdv"] Nov 30 07:56:55 crc kubenswrapper[4941]: W1130 07:56:55.572089 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc0c81bd_9a60_4038_b43f_3beef81d5600.slice/crio-0f829c354a18b93cb4d631b5c489eed6b9ea0a86f4b506e8f4fc1aaf2b8dd9fb WatchSource:0}: Error finding container 0f829c354a18b93cb4d631b5c489eed6b9ea0a86f4b506e8f4fc1aaf2b8dd9fb: Status 404 returned error can't find the container with id 0f829c354a18b93cb4d631b5c489eed6b9ea0a86f4b506e8f4fc1aaf2b8dd9fb Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.629594 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.630833 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.637258 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.637375 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.637401 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.637265 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.637442 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-rbj8q" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.666447 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.737994 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4d55\" (UniqueName: \"kubernetes.io/projected/3360921e-c026-4bfc-bcc6-e29ccf765618-kube-api-access-g4d55\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.738058 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3360921e-c026-4bfc-bcc6-e29ccf765618-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.738100 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3360921e-c026-4bfc-bcc6-e29ccf765618-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.738154 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.738182 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.738219 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.738248 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3360921e-c026-4bfc-bcc6-e29ccf765618-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.738285 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.738303 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3360921e-c026-4bfc-bcc6-e29ccf765618-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.840677 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.840751 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.840821 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.840857 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3360921e-c026-4bfc-bcc6-e29ccf765618-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.840919 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.840961 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3360921e-c026-4bfc-bcc6-e29ccf765618-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.841012 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4d55\" (UniqueName: 
\"kubernetes.io/projected/3360921e-c026-4bfc-bcc6-e29ccf765618-kube-api-access-g4d55\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.841052 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3360921e-c026-4bfc-bcc6-e29ccf765618-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.841078 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3360921e-c026-4bfc-bcc6-e29ccf765618-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.841826 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.842250 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.842676 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3360921e-c026-4bfc-bcc6-e29ccf765618-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.842794 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3360921e-c026-4bfc-bcc6-e29ccf765618-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0" Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.844380 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.844415 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/42fd47e8672fe7d6b13f07ce29753092206a1f26916e2372627bef7b9f87db14/globalmount\"" pod="openstack/rabbitmq-server-0"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.849083 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3360921e-c026-4bfc-bcc6-e29ccf765618-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.850900 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3360921e-c026-4bfc-bcc6-e29ccf765618-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.852689 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.856105 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" event={"ID":"bc0c81bd-9a60-4038-b43f-3beef81d5600","Type":"ContainerStarted","Data":"0f829c354a18b93cb4d631b5c489eed6b9ea0a86f4b506e8f4fc1aaf2b8dd9fb"}
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.858387 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" event={"ID":"8e7b7a1c-90ad-418a-9cf3-65b0f3380685","Type":"ContainerStarted","Data":"ab670963eb153a1d5f63d635f6fa849b7ad5a753319cedfca072c8f00f7afdf8"}
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.862117 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4d55\" (UniqueName: \"kubernetes.io/projected/3360921e-c026-4bfc-bcc6-e29ccf765618-kube-api-access-g4d55\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.892214 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\") pod \"rabbitmq-server-0\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.961623 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.963400 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.967092 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-b5jx6"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.967698 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.969392 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.969548 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.969710 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.970045 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Nov 30 07:56:55 crc kubenswrapper[4941]: I1130 07:56:55.979914 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.044236 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dbf07016-68e0-44f2-8416-0d513af7ef27-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.044754 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.044786 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dbf07016-68e0-44f2-8416-0d513af7ef27-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.044809 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dbf07016-68e0-44f2-8416-0d513af7ef27-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.045039 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.045128 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.045402 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.045603 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dbf07016-68e0-44f2-8416-0d513af7ef27-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.045845 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5g6r\" (UniqueName: \"kubernetes.io/projected/dbf07016-68e0-44f2-8416-0d513af7ef27-kube-api-access-j5g6r\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.147724 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dbf07016-68e0-44f2-8416-0d513af7ef27-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.147783 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.147803 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dbf07016-68e0-44f2-8416-0d513af7ef27-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.147828 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dbf07016-68e0-44f2-8416-0d513af7ef27-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.147855 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.147876 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.147907 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.147934 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dbf07016-68e0-44f2-8416-0d513af7ef27-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.147974 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5g6r\" (UniqueName: \"kubernetes.io/projected/dbf07016-68e0-44f2-8416-0d513af7ef27-kube-api-access-j5g6r\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.149678 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.149998 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dbf07016-68e0-44f2-8416-0d513af7ef27-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.150285 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.151293 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dbf07016-68e0-44f2-8416-0d513af7ef27-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.155169 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dbf07016-68e0-44f2-8416-0d513af7ef27-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.155234 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.157975 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.158024 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0c5320f62a12f1d79f3f647820e5ebb65794e614a225e753220423e3ea970a41/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.163364 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dbf07016-68e0-44f2-8416-0d513af7ef27-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.173091 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5g6r\" (UniqueName: \"kubernetes.io/projected/dbf07016-68e0-44f2-8416-0d513af7ef27-kube-api-access-j5g6r\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.191302 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\") pod \"rabbitmq-cell1-server-0\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.358543 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.524490 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.886472 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3360921e-c026-4bfc-bcc6-e29ccf765618","Type":"ContainerStarted","Data":"4c26fcda5893861aac6596a82231918d586b4a47d52e99c3f1d5cbfe5e654bc3"}
Nov 30 07:56:56 crc kubenswrapper[4941]: I1130 07:56:56.909091 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.048592 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.050059 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.054556 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.056880 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-rl4vk"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.057146 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.064504 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.064660 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.084342 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.175715 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dwd2\" (UniqueName: \"kubernetes.io/projected/e7d6456a-db62-4df3-856e-5896c4798d2f-kube-api-access-6dwd2\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.176367 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e7d6456a-db62-4df3-856e-5896c4798d2f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.176447 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7d6456a-db62-4df3-856e-5896c4798d2f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.176513 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e7d6456a-db62-4df3-856e-5896c4798d2f-config-data-default\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.176551 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e7d6456a-db62-4df3-856e-5896c4798d2f-kolla-config\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.176585 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7d6456a-db62-4df3-856e-5896c4798d2f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.176685 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d6456a-db62-4df3-856e-5896c4798d2f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.176758 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5b405434-fdde-44fa-89e7-d2e0ccf757c2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b405434-fdde-44fa-89e7-d2e0ccf757c2\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.282398 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5b405434-fdde-44fa-89e7-d2e0ccf757c2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b405434-fdde-44fa-89e7-d2e0ccf757c2\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.282467 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dwd2\" (UniqueName: \"kubernetes.io/projected/e7d6456a-db62-4df3-856e-5896c4798d2f-kube-api-access-6dwd2\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.282512 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e7d6456a-db62-4df3-856e-5896c4798d2f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.282562 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7d6456a-db62-4df3-856e-5896c4798d2f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.282613 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e7d6456a-db62-4df3-856e-5896c4798d2f-config-data-default\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.282657 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e7d6456a-db62-4df3-856e-5896c4798d2f-kolla-config\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.282698 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7d6456a-db62-4df3-856e-5896c4798d2f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.282746 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d6456a-db62-4df3-856e-5896c4798d2f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.288890 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e7d6456a-db62-4df3-856e-5896c4798d2f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.291344 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7d6456a-db62-4df3-856e-5896c4798d2f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.291286 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e7d6456a-db62-4df3-856e-5896c4798d2f-kolla-config\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.296884 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e7d6456a-db62-4df3-856e-5896c4798d2f-config-data-default\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.298267 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d6456a-db62-4df3-856e-5896c4798d2f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.300511 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.300563 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5b405434-fdde-44fa-89e7-d2e0ccf757c2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b405434-fdde-44fa-89e7-d2e0ccf757c2\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7fe3be2461e68bd0198b0b0097f9a11cf66fcd5a0049a107aba3b3582999055d/globalmount\"" pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.308884 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7d6456a-db62-4df3-856e-5896c4798d2f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.313062 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dwd2\" (UniqueName: \"kubernetes.io/projected/e7d6456a-db62-4df3-856e-5896c4798d2f-kube-api-access-6dwd2\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.352943 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5b405434-fdde-44fa-89e7-d2e0ccf757c2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b405434-fdde-44fa-89e7-d2e0ccf757c2\") pod \"openstack-galera-0\" (UID: \"e7d6456a-db62-4df3-856e-5896c4798d2f\") " pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.388648 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.425137 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.434077 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.446573 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.449752 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-77klv"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.451814 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.489223 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f2c219f6-011a-47c3-81d2-99d151642f9a-config-data\") pod \"memcached-0\" (UID: \"f2c219f6-011a-47c3-81d2-99d151642f9a\") " pod="openstack/memcached-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.489342 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b926t\" (UniqueName: \"kubernetes.io/projected/f2c219f6-011a-47c3-81d2-99d151642f9a-kube-api-access-b926t\") pod \"memcached-0\" (UID: \"f2c219f6-011a-47c3-81d2-99d151642f9a\") " pod="openstack/memcached-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.489412 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f2c219f6-011a-47c3-81d2-99d151642f9a-kolla-config\") pod \"memcached-0\" (UID: \"f2c219f6-011a-47c3-81d2-99d151642f9a\") " pod="openstack/memcached-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.590764 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b926t\" (UniqueName: \"kubernetes.io/projected/f2c219f6-011a-47c3-81d2-99d151642f9a-kube-api-access-b926t\") pod \"memcached-0\" (UID: \"f2c219f6-011a-47c3-81d2-99d151642f9a\") " pod="openstack/memcached-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.590848 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f2c219f6-011a-47c3-81d2-99d151642f9a-kolla-config\") pod \"memcached-0\" (UID: \"f2c219f6-011a-47c3-81d2-99d151642f9a\") " pod="openstack/memcached-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.590976 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f2c219f6-011a-47c3-81d2-99d151642f9a-config-data\") pod \"memcached-0\" (UID: \"f2c219f6-011a-47c3-81d2-99d151642f9a\") " pod="openstack/memcached-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.591859 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f2c219f6-011a-47c3-81d2-99d151642f9a-kolla-config\") pod \"memcached-0\" (UID: \"f2c219f6-011a-47c3-81d2-99d151642f9a\") " pod="openstack/memcached-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.591914 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f2c219f6-011a-47c3-81d2-99d151642f9a-config-data\") pod \"memcached-0\" (UID: \"f2c219f6-011a-47c3-81d2-99d151642f9a\") " pod="openstack/memcached-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.614135 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b926t\" (UniqueName: \"kubernetes.io/projected/f2c219f6-011a-47c3-81d2-99d151642f9a-kube-api-access-b926t\") pod \"memcached-0\" (UID: \"f2c219f6-011a-47c3-81d2-99d151642f9a\") " pod="openstack/memcached-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: I1130 07:56:57.778877 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Nov 30 07:56:57 crc kubenswrapper[4941]: W1130 07:56:57.991424 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbf07016_68e0_44f2_8416_0d513af7ef27.slice/crio-11079b8b1c0bb3d073da708fcefce53f020808ca64f1808cc12fcbb4133809d7 WatchSource:0}: Error finding container 11079b8b1c0bb3d073da708fcefce53f020808ca64f1808cc12fcbb4133809d7: Status 404 returned error can't find the container with id 11079b8b1c0bb3d073da708fcefce53f020808ca64f1808cc12fcbb4133809d7
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.506499 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Nov 30 07:56:58 crc kubenswrapper[4941]: W1130 07:56:58.522732 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7d6456a_db62_4df3_856e_5896c4798d2f.slice/crio-2b55f7768aea45b853efffc3961f4d2b17412f8d7634fb41b88387a7b0e8df7f WatchSource:0}: Error finding container 2b55f7768aea45b853efffc3961f4d2b17412f8d7634fb41b88387a7b0e8df7f: Status 404 returned error can't find the container with id 2b55f7768aea45b853efffc3961f4d2b17412f8d7634fb41b88387a7b0e8df7f
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.569972 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.583458 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.585543 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.588711 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.588756 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.588756 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-7lvbd"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.589166 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.599999 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.612141 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69a33c95-4877-49f2-9bcc-59d5c750a626-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.612206 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wznn\" (UniqueName: \"kubernetes.io/projected/69a33c95-4877-49f2-9bcc-59d5c750a626-kube-api-access-4wznn\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.613490 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69a33c95-4877-49f2-9bcc-59d5c750a626-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.613726 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-20ade885-27e0-4728-84b2-ffc5e2ecc599\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-20ade885-27e0-4728-84b2-ffc5e2ecc599\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.613765 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/69a33c95-4877-49f2-9bcc-59d5c750a626-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.613789 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69a33c95-4877-49f2-9bcc-59d5c750a626-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.613815 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/69a33c95-4877-49f2-9bcc-59d5c750a626-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.613937 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/69a33c95-4877-49f2-9bcc-59d5c750a626-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: W1130 07:56:58.619027 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2c219f6_011a_47c3_81d2_99d151642f9a.slice/crio-80f3144328d286f83269eee6e777e90f36d7137d8c9c41571f9f7ddada3943d5 WatchSource:0}: Error finding container 80f3144328d286f83269eee6e777e90f36d7137d8c9c41571f9f7ddada3943d5: Status 404 returned error can't find the container with id 80f3144328d286f83269eee6e777e90f36d7137d8c9c41571f9f7ddada3943d5
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.716371 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69a33c95-4877-49f2-9bcc-59d5c750a626-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.716821 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-20ade885-27e0-4728-84b2-ffc5e2ecc599\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-20ade885-27e0-4728-84b2-ffc5e2ecc599\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.716854 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/69a33c95-4877-49f2-9bcc-59d5c750a626-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.716877 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69a33c95-4877-49f2-9bcc-59d5c750a626-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.716905 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/69a33c95-4877-49f2-9bcc-59d5c750a626-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.716974 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/69a33c95-4877-49f2-9bcc-59d5c750a626-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.717041 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69a33c95-4877-49f2-9bcc-59d5c750a626-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.717069 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wznn\" (UniqueName: \"kubernetes.io/projected/69a33c95-4877-49f2-9bcc-59d5c750a626-kube-api-access-4wznn\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.717465 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/69a33c95-4877-49f2-9bcc-59d5c750a626-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.718122 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69a33c95-4877-49f2-9bcc-59d5c750a626-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.718222 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/69a33c95-4877-49f2-9bcc-59d5c750a626-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.718693 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/69a33c95-4877-49f2-9bcc-59d5c750a626-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.721621 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.721656 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-20ade885-27e0-4728-84b2-ffc5e2ecc599\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-20ade885-27e0-4728-84b2-ffc5e2ecc599\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ff5b8f3318cefe3a56a88bdb3b4538bc0edb7bca46ce70ad59cca2534e58adcf/globalmount\"" pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.725237 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69a33c95-4877-49f2-9bcc-59d5c750a626-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.725492 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/69a33c95-4877-49f2-9bcc-59d5c750a626-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.738470 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wznn\" (UniqueName: \"kubernetes.io/projected/69a33c95-4877-49f2-9bcc-59d5c750a626-kube-api-access-4wznn\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.759224 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-20ade885-27e0-4728-84b2-ffc5e2ecc599\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-20ade885-27e0-4728-84b2-ffc5e2ecc599\") pod \"openstack-cell1-galera-0\" (UID: \"69a33c95-4877-49f2-9bcc-59d5c750a626\") " pod="openstack/openstack-cell1-galera-0"
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.912206 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"f2c219f6-011a-47c3-81d2-99d151642f9a","Type":"ContainerStarted","Data":"80f3144328d286f83269eee6e777e90f36d7137d8c9c41571f9f7ddada3943d5"}
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.915534 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dbf07016-68e0-44f2-8416-0d513af7ef27","Type":"ContainerStarted","Data":"11079b8b1c0bb3d073da708fcefce53f020808ca64f1808cc12fcbb4133809d7"}
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.917393 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e7d6456a-db62-4df3-856e-5896c4798d2f","Type":"ContainerStarted","Data":"2b55f7768aea45b853efffc3961f4d2b17412f8d7634fb41b88387a7b0e8df7f"}
Nov 30 07:56:58 crc kubenswrapper[4941]: I1130 07:56:58.932068 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Nov 30 07:57:11 crc kubenswrapper[4941]: I1130 07:57:11.754280 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Nov 30 07:57:12 crc kubenswrapper[4941]: I1130 07:57:12.034979 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"69a33c95-4877-49f2-9bcc-59d5c750a626","Type":"ContainerStarted","Data":"4f75c71bfefc4bbaaa74a52d45f52d4cde874245bdf787b1be2cd1c10e67d132"}
Nov 30 07:57:12 crc kubenswrapper[4941]: I1130 07:57:12.035022 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"69a33c95-4877-49f2-9bcc-59d5c750a626","Type":"ContainerStarted","Data":"1b9c48cfe825cfd3bfc0d3b67418ab1fd89810407bda6f5de58c84ebcde12d4e"}
Nov 30 07:57:12 crc kubenswrapper[4941]: I1130 07:57:12.036459 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"f2c219f6-011a-47c3-81d2-99d151642f9a","Type":"ContainerStarted","Data":"3d390f2e948014aa006dd6bce96359d0e49eece5eb372bc6c5a593b521db41f7"}
Nov 30 07:57:12 crc kubenswrapper[4941]: I1130 07:57:12.037079 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0"
Nov 30 07:57:12 crc kubenswrapper[4941]: I1130 07:57:12.039223 4941 generic.go:334] "Generic (PLEG): container finished" podID="8e7b7a1c-90ad-418a-9cf3-65b0f3380685" containerID="de8051754381d512eff2bfdef7028865462b7753ba7b0a9b6f9943b1888b0dc1" exitCode=0
Nov 30 07:57:12 crc kubenswrapper[4941]: I1130 07:57:12.039283 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" event={"ID":"8e7b7a1c-90ad-418a-9cf3-65b0f3380685","Type":"ContainerDied","Data":"de8051754381d512eff2bfdef7028865462b7753ba7b0a9b6f9943b1888b0dc1"}
Nov 30 07:57:12 crc kubenswrapper[4941]: I1130 07:57:12.040456 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e7d6456a-db62-4df3-856e-5896c4798d2f","Type":"ContainerStarted","Data":"b9b315657dca9df6ddfe87237b28704fdd09da3860c3f76562c91e5d4a3864c4"}
Nov 30 07:57:12 crc kubenswrapper[4941]: I1130 07:57:12.042917 4941 generic.go:334] "Generic (PLEG): container finished" podID="bc0c81bd-9a60-4038-b43f-3beef81d5600" containerID="9484ab13d1fbe7d19419098c0ac5a88afee6a0db42346b7d07aac16e41d4512c" exitCode=0
Nov 30 07:57:12 crc kubenswrapper[4941]: I1130 07:57:12.042937 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" event={"ID":"bc0c81bd-9a60-4038-b43f-3beef81d5600","Type":"ContainerDied","Data":"9484ab13d1fbe7d19419098c0ac5a88afee6a0db42346b7d07aac16e41d4512c"}
Nov 30 07:57:12 crc kubenswrapper[4941]: I1130 07:57:12.173823 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.451620891 podStartE2EDuration="15.173798333s" podCreationTimestamp="2025-11-30 07:56:57 +0000 UTC" firstStartedPulling="2025-11-30 07:56:58.62706264 +0000 UTC m=+4239.395234249" lastFinishedPulling="2025-11-30 07:57:11.349240082 +0000 UTC m=+4252.117411691" observedRunningTime="2025-11-30 07:57:12.147451199 +0000 UTC m=+4252.915622828" watchObservedRunningTime="2025-11-30 07:57:12.173798333 +0000 UTC m=+4252.941969952"
Nov 30 07:57:13 crc kubenswrapper[4941]: I1130 07:57:13.055030 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" event={"ID":"8e7b7a1c-90ad-418a-9cf3-65b0f3380685","Type":"ContainerStarted","Data":"dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e"}
Nov 30 07:57:13 crc kubenswrapper[4941]: I1130 07:57:13.055858 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd"
Nov 30 07:57:13 crc kubenswrapper[4941]: I1130 07:57:13.059365 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3360921e-c026-4bfc-bcc6-e29ccf765618","Type":"ContainerStarted","Data":"0051f414935469efef4c5a5e15a744c79159cab3f36ea6ad87c974432d15241b"}
Nov 30 07:57:13 crc kubenswrapper[4941]: I1130 07:57:13.065717 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" event={"ID":"bc0c81bd-9a60-4038-b43f-3beef81d5600","Type":"ContainerStarted","Data":"77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3"}
Nov 30 07:57:13 crc kubenswrapper[4941]: I1130 07:57:13.065774 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv"
Nov 30 07:57:13 crc kubenswrapper[4941]: I1130 07:57:13.070626 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dbf07016-68e0-44f2-8416-0d513af7ef27","Type":"ContainerStarted","Data":"eb2e132cf08c164859b147df3c343fa733d5c70ff30b2e720d50ec53352c217c"}
Nov 30 07:57:13 crc kubenswrapper[4941]: I1130 07:57:13.088775 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" podStartSLOduration=3.120241646 podStartE2EDuration="19.088745326s" podCreationTimestamp="2025-11-30 07:56:54 +0000 UTC" firstStartedPulling="2025-11-30 07:56:55.381714012 +0000 UTC m=+4236.149885631" lastFinishedPulling="2025-11-30 07:57:11.350217702 +0000 UTC m=+4252.118389311" observedRunningTime="2025-11-30 07:57:13.082474483 +0000 UTC m=+4253.850646142" watchObservedRunningTime="2025-11-30 07:57:13.088745326 +0000 UTC m=+4253.856916965"
Nov 30 07:57:13 crc kubenswrapper[4941]: I1130 07:57:13.160709 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" podStartSLOduration=3.3551974270000002 podStartE2EDuration="19.16068828s" podCreationTimestamp="2025-11-30 07:56:54 +0000 UTC" firstStartedPulling="2025-11-30 07:56:55.575308355 +0000 UTC m=+4236.343479964" lastFinishedPulling="2025-11-30 07:57:11.380799198 +0000 UTC m=+4252.148970817" observedRunningTime="2025-11-30 07:57:13.157995327 +0000 UTC m=+4253.926166936" watchObservedRunningTime="2025-11-30 07:57:13.16068828 +0000 UTC m=+4253.928859889"
Nov 30 07:57:15 crc kubenswrapper[4941]: I1130 07:57:15.104141 4941 generic.go:334] "Generic (PLEG): container finished" podID="e7d6456a-db62-4df3-856e-5896c4798d2f" containerID="b9b315657dca9df6ddfe87237b28704fdd09da3860c3f76562c91e5d4a3864c4" exitCode=0
Nov 30 07:57:15 crc kubenswrapper[4941]: I1130 07:57:15.104236 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e7d6456a-db62-4df3-856e-5896c4798d2f","Type":"ContainerDied","Data":"b9b315657dca9df6ddfe87237b28704fdd09da3860c3f76562c91e5d4a3864c4"}
Nov 30 07:57:16 crc kubenswrapper[4941]: I1130 07:57:16.125416 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"e7d6456a-db62-4df3-856e-5896c4798d2f","Type":"ContainerStarted","Data":"48e0ffd1940085d9759483ba7f049039d735e95a96141abac068b16ee261fc29"}
Nov 30 07:57:16 crc kubenswrapper[4941]: I1130 07:57:16.127822 4941 generic.go:334] "Generic (PLEG): container finished" podID="69a33c95-4877-49f2-9bcc-59d5c750a626" containerID="4f75c71bfefc4bbaaa74a52d45f52d4cde874245bdf787b1be2cd1c10e67d132" exitCode=0
Nov 30 07:57:16 crc kubenswrapper[4941]: I1130 07:57:16.127886 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"69a33c95-4877-49f2-9bcc-59d5c750a626","Type":"ContainerDied","Data":"4f75c71bfefc4bbaaa74a52d45f52d4cde874245bdf787b1be2cd1c10e67d132"}
Nov 30 07:57:16 crc kubenswrapper[4941]: I1130 07:57:16.175004 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=7.317511375 podStartE2EDuration="20.174981568s" podCreationTimestamp="2025-11-30 07:56:56 +0000 UTC" firstStartedPulling="2025-11-30 07:56:58.525547543 +0000 UTC m=+4239.293719172" lastFinishedPulling="2025-11-30 07:57:11.383017756 +0000 UTC m=+4252.151189365" observedRunningTime="2025-11-30 07:57:16.161290545 +0000 UTC m=+4256.929462164" watchObservedRunningTime="2025-11-30 07:57:16.174981568 +0000 UTC m=+4256.943153177"
Nov 30 07:57:17 crc kubenswrapper[4941]: I1130 07:57:17.140938 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"69a33c95-4877-49f2-9bcc-59d5c750a626","Type":"ContainerStarted","Data":"3ce18f103e2844de45e1922f27f84b3dba67574aacab4781e46eb8c6fbbff31d"}
Nov 30 07:57:17 crc kubenswrapper[4941]: I1130 07:57:17.191382 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=20.191320845 podStartE2EDuration="20.191320845s" podCreationTimestamp="2025-11-30 07:56:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:57:17.181763599 +0000 UTC m=+4257.949935248" watchObservedRunningTime="2025-11-30 07:57:17.191320845 +0000 UTC m=+4257.959492494"
Nov 30 07:57:17 crc kubenswrapper[4941]: I1130 07:57:17.390176 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Nov 30 07:57:17 crc kubenswrapper[4941]: I1130 07:57:17.390270 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Nov 30 07:57:17 crc kubenswrapper[4941]: I1130 07:57:17.779973 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Nov 30 07:57:18 crc kubenswrapper[4941]: I1130 07:57:18.933375 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Nov 30 07:57:18 crc kubenswrapper[4941]: I1130 07:57:18.934067 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Nov 30 07:57:19 crc kubenswrapper[4941]: I1130 07:57:19.849634 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd"
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.080294 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv"
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.136401 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ffff64c9-xckjd"]
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.172844 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" podUID="8e7b7a1c-90ad-418a-9cf3-65b0f3380685" containerName="dnsmasq-dns" containerID="cri-o://dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e" gracePeriod=10
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.623528 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd"
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.806828 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-config\") pod \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\" (UID: \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\") "
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.806983 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ds4dg\" (UniqueName: \"kubernetes.io/projected/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-kube-api-access-ds4dg\") pod \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\" (UID: \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\") "
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.807257 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-dns-svc\") pod \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\" (UID: \"8e7b7a1c-90ad-418a-9cf3-65b0f3380685\") "
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.813111 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-kube-api-access-ds4dg" (OuterVolumeSpecName: "kube-api-access-ds4dg") pod "8e7b7a1c-90ad-418a-9cf3-65b0f3380685" (UID: "8e7b7a1c-90ad-418a-9cf3-65b0f3380685"). InnerVolumeSpecName "kube-api-access-ds4dg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.852483 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8e7b7a1c-90ad-418a-9cf3-65b0f3380685" (UID: "8e7b7a1c-90ad-418a-9cf3-65b0f3380685"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.875969 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-config" (OuterVolumeSpecName: "config") pod "8e7b7a1c-90ad-418a-9cf3-65b0f3380685" (UID: "8e7b7a1c-90ad-418a-9cf3-65b0f3380685"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.908903 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-config\") on node \"crc\" DevicePath \"\""
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.908948 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ds4dg\" (UniqueName: \"kubernetes.io/projected/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-kube-api-access-ds4dg\") on node \"crc\" DevicePath \"\""
Nov 30 07:57:20 crc kubenswrapper[4941]: I1130 07:57:20.908964 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8e7b7a1c-90ad-418a-9cf3-65b0f3380685-dns-svc\") on node \"crc\" DevicePath \"\""
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.181974 4941 generic.go:334] "Generic (PLEG): container finished" podID="8e7b7a1c-90ad-418a-9cf3-65b0f3380685" containerID="dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e" exitCode=0
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.182019 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" event={"ID":"8e7b7a1c-90ad-418a-9cf3-65b0f3380685","Type":"ContainerDied","Data":"dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e"}
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.182046 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd" event={"ID":"8e7b7a1c-90ad-418a-9cf3-65b0f3380685","Type":"ContainerDied","Data":"ab670963eb153a1d5f63d635f6fa849b7ad5a753319cedfca072c8f00f7afdf8"}
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.182066 4941 scope.go:117] "RemoveContainer" containerID="dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e"
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.182087 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ffff64c9-xckjd"
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.208456 4941 scope.go:117] "RemoveContainer" containerID="de8051754381d512eff2bfdef7028865462b7753ba7b0a9b6f9943b1888b0dc1"
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.234740 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ffff64c9-xckjd"]
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.237814 4941 scope.go:117] "RemoveContainer" containerID="dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e"
Nov 30 07:57:21 crc kubenswrapper[4941]: E1130 07:57:21.238493 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e\": container with ID starting with dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e not found: ID does not exist" containerID="dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e"
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.238544 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e"} err="failed to get container status \"dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e\": rpc error: code = NotFound desc = could not find container \"dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e\": container with ID starting with dbd3d4a9b8484cbe5d2ff8e14f4c30516c46246a5a7ad266039d3e7a14f3fd7e not found: ID does not exist"
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.238580 4941 scope.go:117] "RemoveContainer" containerID="de8051754381d512eff2bfdef7028865462b7753ba7b0a9b6f9943b1888b0dc1"
Nov 30 07:57:21 crc kubenswrapper[4941]: E1130 07:57:21.239005 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de8051754381d512eff2bfdef7028865462b7753ba7b0a9b6f9943b1888b0dc1\": container with ID starting with de8051754381d512eff2bfdef7028865462b7753ba7b0a9b6f9943b1888b0dc1 not found: ID does not exist" containerID="de8051754381d512eff2bfdef7028865462b7753ba7b0a9b6f9943b1888b0dc1"
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.239026 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de8051754381d512eff2bfdef7028865462b7753ba7b0a9b6f9943b1888b0dc1"} err="failed to get container status \"de8051754381d512eff2bfdef7028865462b7753ba7b0a9b6f9943b1888b0dc1\": rpc error: code = NotFound desc = could not find container \"de8051754381d512eff2bfdef7028865462b7753ba7b0a9b6f9943b1888b0dc1\": container with ID starting with de8051754381d512eff2bfdef7028865462b7753ba7b0a9b6f9943b1888b0dc1 not found: ID does not exist"
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.244280 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6ffff64c9-xckjd"]
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.491786 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.531250 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e7b7a1c-90ad-418a-9cf3-65b0f3380685" path="/var/lib/kubelet/pods/8e7b7a1c-90ad-418a-9cf3-65b0f3380685/volumes"
Nov 30 07:57:21 crc kubenswrapper[4941]: I1130 07:57:21.585464 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Nov 30 07:57:23 crc kubenswrapper[4941]: I1130 07:57:23.035726 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Nov 30 07:57:23 crc kubenswrapper[4941]: I1130 07:57:23.118661 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Nov 30 07:57:46 crc kubenswrapper[4941]: I1130 07:57:46.456390 4941 generic.go:334] "Generic (PLEG): container finished" podID="3360921e-c026-4bfc-bcc6-e29ccf765618" containerID="0051f414935469efef4c5a5e15a744c79159cab3f36ea6ad87c974432d15241b" exitCode=0
Nov 30 07:57:46 crc kubenswrapper[4941]: I1130 07:57:46.456501 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3360921e-c026-4bfc-bcc6-e29ccf765618","Type":"ContainerDied","Data":"0051f414935469efef4c5a5e15a744c79159cab3f36ea6ad87c974432d15241b"}
Nov 30 07:57:46 crc kubenswrapper[4941]: I1130 07:57:46.460673 4941 generic.go:334] "Generic (PLEG): container finished" podID="dbf07016-68e0-44f2-8416-0d513af7ef27" containerID="eb2e132cf08c164859b147df3c343fa733d5c70ff30b2e720d50ec53352c217c" exitCode=0
Nov 30 07:57:46 crc kubenswrapper[4941]: I1130 07:57:46.461082 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dbf07016-68e0-44f2-8416-0d513af7ef27","Type":"ContainerDied","Data":"eb2e132cf08c164859b147df3c343fa733d5c70ff30b2e720d50ec53352c217c"}
Nov 30 07:57:47 crc kubenswrapper[4941]: I1130 07:57:47.474314 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3360921e-c026-4bfc-bcc6-e29ccf765618","Type":"ContainerStarted","Data":"b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd"}
Nov 30 07:57:47 crc kubenswrapper[4941]: I1130 07:57:47.476538 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Nov 30 07:57:47 crc kubenswrapper[4941]: I1130 07:57:47.477381 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dbf07016-68e0-44f2-8416-0d513af7ef27","Type":"ContainerStarted","Data":"74745d844d492be8ef34831863f289903a07472e6439d6990103bea3a1035588"}
Nov 30 07:57:47 crc kubenswrapper[4941]: I1130 07:57:47.477847 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:57:47 crc kubenswrapper[4941]: I1130 07:57:47.510615 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=40.574920398 podStartE2EDuration="53.510591138s" podCreationTimestamp="2025-11-30 07:56:54 +0000 UTC" firstStartedPulling="2025-11-30 07:56:56.540957965 +0000 UTC m=+4237.309129574" lastFinishedPulling="2025-11-30 07:57:09.476628675 +0000 UTC m=+4250.244800314" observedRunningTime="2025-11-30 07:57:47.510199936 +0000 UTC m=+4288.278371585" watchObservedRunningTime="2025-11-30 07:57:47.510591138 +0000 UTC m=+4288.278762757"
Nov 30 07:57:47 crc kubenswrapper[4941]: I1130 07:57:47.548073 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=40.220103805 podStartE2EDuration="53.548052926s" podCreationTimestamp="2025-11-30 07:56:54 +0000 UTC" firstStartedPulling="2025-11-30 07:56:58.025351986 +0000 UTC m=+4238.793523595" lastFinishedPulling="2025-11-30 07:57:11.353301107 +0000 UTC m=+4252.121472716" observedRunningTime="2025-11-30 07:57:47.544763104 +0000 UTC m=+4288.312934713" watchObservedRunningTime="2025-11-30 07:57:47.548052926 +0000 UTC m=+4288.316224545"
Nov 30 07:57:56 crc kubenswrapper[4941]: I1130 07:57:56.362638 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:02 crc kubenswrapper[4941]: I1130 07:58:02.978063 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 07:58:02 crc kubenswrapper[4941]: I1130 07:58:02.978543 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 07:58:05 crc kubenswrapper[4941]: I1130 07:58:05.973390 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Nov 30 07:58:08 crc kubenswrapper[4941]: I1130 07:58:08.997627 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-645f94f75c-fcdsf"]
Nov 30 07:58:09 crc kubenswrapper[4941]: E1130 07:58:09.000232 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e7b7a1c-90ad-418a-9cf3-65b0f3380685" containerName="dnsmasq-dns"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.000401 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e7b7a1c-90ad-418a-9cf3-65b0f3380685" containerName="dnsmasq-dns"
Nov 30 07:58:09 crc kubenswrapper[4941]: E1130 07:58:09.000570 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e7b7a1c-90ad-418a-9cf3-65b0f3380685" containerName="init"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.000687 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e7b7a1c-90ad-418a-9cf3-65b0f3380685" containerName="init"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.000975 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e7b7a1c-90ad-418a-9cf3-65b0f3380685" containerName="dnsmasq-dns"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.002250 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.015161 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-645f94f75c-fcdsf"]
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.108095 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-config\") pod \"dnsmasq-dns-645f94f75c-fcdsf\" (UID: \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\") " pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.108577 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5t5z\" (UniqueName: \"kubernetes.io/projected/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-kube-api-access-x5t5z\") pod \"dnsmasq-dns-645f94f75c-fcdsf\" (UID: \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\") " pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.108942 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-dns-svc\") pod \"dnsmasq-dns-645f94f75c-fcdsf\" (UID: \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\") " pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.210891 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-dns-svc\") pod \"dnsmasq-dns-645f94f75c-fcdsf\" (UID: \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\") " pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.211001 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-config\") pod \"dnsmasq-dns-645f94f75c-fcdsf\" (UID: \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\") " pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.211048 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5t5z\" (UniqueName: \"kubernetes.io/projected/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-kube-api-access-x5t5z\") pod \"dnsmasq-dns-645f94f75c-fcdsf\" (UID: \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\") " pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.212468 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-dns-svc\") pod \"dnsmasq-dns-645f94f75c-fcdsf\" (UID: \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\") " pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.212643 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-config\") pod \"dnsmasq-dns-645f94f75c-fcdsf\" (UID: \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\") " pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.400759 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5t5z\" (UniqueName: \"kubernetes.io/projected/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-kube-api-access-x5t5z\") pod \"dnsmasq-dns-645f94f75c-fcdsf\" (UID: \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\") " pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.657203 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:09 crc kubenswrapper[4941]: I1130 07:58:09.799516 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 30 07:58:10 crc kubenswrapper[4941]: I1130 07:58:10.206696 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-645f94f75c-fcdsf"]
Nov 30 07:58:10 crc kubenswrapper[4941]: I1130 07:58:10.504811 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:58:10 crc kubenswrapper[4941]: I1130 07:58:10.687199 4941 generic.go:334] "Generic (PLEG): container finished" podID="ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc" containerID="69ac4949b965efa64beabb7f50e0b4b996cc8208e8200bf4400e80f69061697a" exitCode=0
Nov 30 07:58:10 crc kubenswrapper[4941]: I1130 07:58:10.687704 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf" event={"ID":"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc","Type":"ContainerDied","Data":"69ac4949b965efa64beabb7f50e0b4b996cc8208e8200bf4400e80f69061697a"}
Nov 30 07:58:10 crc kubenswrapper[4941]: I1130 07:58:10.687763 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf" event={"ID":"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc","Type":"ContainerStarted","Data":"9c96096de27d18b9fda8341a33e785083b797ede0672b9a1d529deb6a6dbca28"}
Nov 30 07:58:11 crc kubenswrapper[4941]: I1130 07:58:11.697799 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf" event={"ID":"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc","Type":"ContainerStarted","Data":"d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4"}
Nov 30 07:58:11 crc kubenswrapper[4941]: I1130 07:58:11.698443 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:11 crc kubenswrapper[4941]: I1130 07:58:11.725304 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf" podStartSLOduration=3.725284255 podStartE2EDuration="3.725284255s" podCreationTimestamp="2025-11-30 07:58:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:58:11.721658013 +0000 UTC m=+4312.489829622" watchObservedRunningTime="2025-11-30 07:58:11.725284255 +0000 UTC m=+4312.493455864"
Nov 30 07:58:11 crc kubenswrapper[4941]: I1130 07:58:11.772422 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="3360921e-c026-4bfc-bcc6-e29ccf765618" containerName="rabbitmq" containerID="cri-o://b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd" gracePeriod=604799
Nov 30 07:58:12 crc kubenswrapper[4941]: I1130 07:58:12.454030 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="dbf07016-68e0-44f2-8416-0d513af7ef27" containerName="rabbitmq" containerID="cri-o://74745d844d492be8ef34831863f289903a07472e6439d6990103bea3a1035588" gracePeriod=604799
Nov 30 07:58:15 crc kubenswrapper[4941]: I1130 07:58:15.970593 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="3360921e-c026-4bfc-bcc6-e29ccf765618" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.232:5672: connect: connection refused"
Nov 30 07:58:16 crc kubenswrapper[4941]: I1130 07:58:16.359487 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="dbf07016-68e0-44f2-8416-0d513af7ef27" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.233:5672: connect: connection refused"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.355572 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.430274 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4d55\" (UniqueName: \"kubernetes.io/projected/3360921e-c026-4bfc-bcc6-e29ccf765618-kube-api-access-g4d55\") pod \"3360921e-c026-4bfc-bcc6-e29ccf765618\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") "
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.430459 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\") pod \"3360921e-c026-4bfc-bcc6-e29ccf765618\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") "
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.430545 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-confd\") pod \"3360921e-c026-4bfc-bcc6-e29ccf765618\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") "
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.432358 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3360921e-c026-4bfc-bcc6-e29ccf765618-pod-info\") pod \"3360921e-c026-4bfc-bcc6-e29ccf765618\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") "
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.432812 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-erlang-cookie\") pod \"3360921e-c026-4bfc-bcc6-e29ccf765618\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") "
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.433633 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3360921e-c026-4bfc-bcc6-e29ccf765618-erlang-cookie-secret\") pod \"3360921e-c026-4bfc-bcc6-e29ccf765618\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") "
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.433715 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-plugins\") pod \"3360921e-c026-4bfc-bcc6-e29ccf765618\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") "
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.433729 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "3360921e-c026-4bfc-bcc6-e29ccf765618" (UID: "3360921e-c026-4bfc-bcc6-e29ccf765618"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.433811 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3360921e-c026-4bfc-bcc6-e29ccf765618-plugins-conf\") pod \"3360921e-c026-4bfc-bcc6-e29ccf765618\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") "
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.433862 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3360921e-c026-4bfc-bcc6-e29ccf765618-server-conf\") pod \"3360921e-c026-4bfc-bcc6-e29ccf765618\" (UID: \"3360921e-c026-4bfc-bcc6-e29ccf765618\") "
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.434517 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.434686 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3360921e-c026-4bfc-bcc6-e29ccf765618-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "3360921e-c026-4bfc-bcc6-e29ccf765618" (UID: "3360921e-c026-4bfc-bcc6-e29ccf765618"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.434811 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "3360921e-c026-4bfc-bcc6-e29ccf765618" (UID: "3360921e-c026-4bfc-bcc6-e29ccf765618"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.438938 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3360921e-c026-4bfc-bcc6-e29ccf765618-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "3360921e-c026-4bfc-bcc6-e29ccf765618" (UID: "3360921e-c026-4bfc-bcc6-e29ccf765618"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.440046 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3360921e-c026-4bfc-bcc6-e29ccf765618-kube-api-access-g4d55" (OuterVolumeSpecName: "kube-api-access-g4d55") pod "3360921e-c026-4bfc-bcc6-e29ccf765618" (UID: "3360921e-c026-4bfc-bcc6-e29ccf765618"). InnerVolumeSpecName "kube-api-access-g4d55". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.479953 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/3360921e-c026-4bfc-bcc6-e29ccf765618-pod-info" (OuterVolumeSpecName: "pod-info") pod "3360921e-c026-4bfc-bcc6-e29ccf765618" (UID: "3360921e-c026-4bfc-bcc6-e29ccf765618"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.481666 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366" (OuterVolumeSpecName: "persistence") pod "3360921e-c026-4bfc-bcc6-e29ccf765618" (UID: "3360921e-c026-4bfc-bcc6-e29ccf765618"). InnerVolumeSpecName "pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366". PluginName "kubernetes.io/csi", VolumeGidValue ""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.483360 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3360921e-c026-4bfc-bcc6-e29ccf765618-server-conf" (OuterVolumeSpecName: "server-conf") pod "3360921e-c026-4bfc-bcc6-e29ccf765618" (UID: "3360921e-c026-4bfc-bcc6-e29ccf765618"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.535957 4941 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3360921e-c026-4bfc-bcc6-e29ccf765618-plugins-conf\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.536020 4941 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3360921e-c026-4bfc-bcc6-e29ccf765618-server-conf\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.536056 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4d55\" (UniqueName: \"kubernetes.io/projected/3360921e-c026-4bfc-bcc6-e29ccf765618-kube-api-access-g4d55\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.536111 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\") on node \"crc\" "
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.536122 4941 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3360921e-c026-4bfc-bcc6-e29ccf765618-pod-info\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.536132 4941 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3360921e-c026-4bfc-bcc6-e29ccf765618-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.536142 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.560245 4941 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.560579 4941 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366") on node "crc"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.579143 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "3360921e-c026-4bfc-bcc6-e29ccf765618" (UID: "3360921e-c026-4bfc-bcc6-e29ccf765618"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.638470 4941 reconciler_common.go:293] "Volume detached for volume \"pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.638508 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3360921e-c026-4bfc-bcc6-e29ccf765618-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.763055 4941 generic.go:334] "Generic (PLEG): container finished" podID="3360921e-c026-4bfc-bcc6-e29ccf765618" containerID="b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd" exitCode=0
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.763152 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3360921e-c026-4bfc-bcc6-e29ccf765618","Type":"ContainerDied","Data":"b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd"}
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.763260 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3360921e-c026-4bfc-bcc6-e29ccf765618","Type":"ContainerDied","Data":"4c26fcda5893861aac6596a82231918d586b4a47d52e99c3f1d5cbfe5e654bc3"}
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.763183 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.763309 4941 scope.go:117] "RemoveContainer" containerID="b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.770201 4941 generic.go:334] "Generic (PLEG): container finished" podID="dbf07016-68e0-44f2-8416-0d513af7ef27" containerID="74745d844d492be8ef34831863f289903a07472e6439d6990103bea3a1035588" exitCode=0
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.770239 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dbf07016-68e0-44f2-8416-0d513af7ef27","Type":"ContainerDied","Data":"74745d844d492be8ef34831863f289903a07472e6439d6990103bea3a1035588"}
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.787046 4941 scope.go:117] "RemoveContainer" containerID="0051f414935469efef4c5a5e15a744c79159cab3f36ea6ad87c974432d15241b"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.814977 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.821617 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.854946 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 30 07:58:18 crc kubenswrapper[4941]: E1130 07:58:18.855370 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3360921e-c026-4bfc-bcc6-e29ccf765618" containerName="setup-container"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.855393 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3360921e-c026-4bfc-bcc6-e29ccf765618" containerName="setup-container"
Nov 30 07:58:18 crc kubenswrapper[4941]: E1130 07:58:18.855419 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3360921e-c026-4bfc-bcc6-e29ccf765618" containerName="rabbitmq"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.855426 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3360921e-c026-4bfc-bcc6-e29ccf765618" containerName="rabbitmq"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.855692 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3360921e-c026-4bfc-bcc6-e29ccf765618" containerName="rabbitmq"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.856745 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.863972 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-rbj8q"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.864159 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.864211 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.864251 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.864435 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.884437 4941 scope.go:117] "RemoveContainer" containerID="b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.884859 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 30 07:58:18 crc kubenswrapper[4941]: E1130 07:58:18.886163 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd\": container with ID starting with b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd not found: ID does not exist" containerID="b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.886240 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd"} err="failed to get container status \"b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd\": rpc error: code = NotFound desc = could not find container \"b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd\": container with ID starting with b85a6441af7e2786584e791745251a10999bb5d08370734872132d1d1216b7bd not found: ID does not exist"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.886279 4941 scope.go:117] "RemoveContainer" containerID="0051f414935469efef4c5a5e15a744c79159cab3f36ea6ad87c974432d15241b"
Nov 30 07:58:18 crc kubenswrapper[4941]: E1130 07:58:18.886641 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0051f414935469efef4c5a5e15a744c79159cab3f36ea6ad87c974432d15241b\": container with ID starting with 0051f414935469efef4c5a5e15a744c79159cab3f36ea6ad87c974432d15241b not found: ID does not exist" containerID="0051f414935469efef4c5a5e15a744c79159cab3f36ea6ad87c974432d15241b"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.886670 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0051f414935469efef4c5a5e15a744c79159cab3f36ea6ad87c974432d15241b"} err="failed to get container status \"0051f414935469efef4c5a5e15a744c79159cab3f36ea6ad87c974432d15241b\": rpc error: code = NotFound desc = could not find container \"0051f414935469efef4c5a5e15a744c79159cab3f36ea6ad87c974432d15241b\": container with ID starting with 0051f414935469efef4c5a5e15a744c79159cab3f36ea6ad87c974432d15241b not found: ID does not exist"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.950914 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7047253b-c52c-40a1-a44c-f20c5e5fcdac-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.951395 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7047253b-c52c-40a1-a44c-f20c5e5fcdac-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.951416 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.951433 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7047253b-c52c-40a1-a44c-f20c5e5fcdac-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.951492 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7047253b-c52c-40a1-a44c-f20c5e5fcdac-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.951507 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7047253b-c52c-40a1-a44c-f20c5e5fcdac-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.951535 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7047253b-c52c-40a1-a44c-f20c5e5fcdac-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.951554 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxg2g\" (UniqueName: \"kubernetes.io/projected/7047253b-c52c-40a1-a44c-f20c5e5fcdac-kube-api-access-wxg2g\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:18 crc kubenswrapper[4941]: I1130 07:58:18.951568 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7047253b-c52c-40a1-a44c-f20c5e5fcdac-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.022910 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.052088 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-confd\") pod \"dbf07016-68e0-44f2-8416-0d513af7ef27\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") "
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.052385 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-plugins\") pod \"dbf07016-68e0-44f2-8416-0d513af7ef27\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") "
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.052505 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dbf07016-68e0-44f2-8416-0d513af7ef27-pod-info\") pod \"dbf07016-68e0-44f2-8416-0d513af7ef27\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") "
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.052662 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dbf07016-68e0-44f2-8416-0d513af7ef27-plugins-conf\") pod \"dbf07016-68e0-44f2-8416-0d513af7ef27\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") "
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.052788 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dbf07016-68e0-44f2-8416-0d513af7ef27-server-conf\") pod \"dbf07016-68e0-44f2-8416-0d513af7ef27\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") "
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.052872 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5g6r\" (UniqueName: \"kubernetes.io/projected/dbf07016-68e0-44f2-8416-0d513af7ef27-kube-api-access-j5g6r\") pod \"dbf07016-68e0-44f2-8416-0d513af7ef27\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") "
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.053083 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\") pod \"dbf07016-68e0-44f2-8416-0d513af7ef27\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") "
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.053180 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dbf07016-68e0-44f2-8416-0d513af7ef27-erlang-cookie-secret\") pod \"dbf07016-68e0-44f2-8416-0d513af7ef27\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") "
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.053302 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-erlang-cookie\") pod \"dbf07016-68e0-44f2-8416-0d513af7ef27\" (UID: \"dbf07016-68e0-44f2-8416-0d513af7ef27\") "
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.053098 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume
"kubernetes.io/empty-dir/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "dbf07016-68e0-44f2-8416-0d513af7ef27" (UID: "dbf07016-68e0-44f2-8416-0d513af7ef27"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.053505 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxg2g\" (UniqueName: \"kubernetes.io/projected/7047253b-c52c-40a1-a44c-f20c5e5fcdac-kube-api-access-wxg2g\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.053609 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7047253b-c52c-40a1-a44c-f20c5e5fcdac-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.053817 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7047253b-c52c-40a1-a44c-f20c5e5fcdac-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.053883 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7047253b-c52c-40a1-a44c-f20c5e5fcdac-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.053936 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.053968 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7047253b-c52c-40a1-a44c-f20c5e5fcdac-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.054195 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7047253b-c52c-40a1-a44c-f20c5e5fcdac-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.054227 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7047253b-c52c-40a1-a44c-f20c5e5fcdac-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.054249 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-erlang-cookie" 
(OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "dbf07016-68e0-44f2-8416-0d513af7ef27" (UID: "dbf07016-68e0-44f2-8416-0d513af7ef27"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.054323 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7047253b-c52c-40a1-a44c-f20c5e5fcdac-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.054424 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.054449 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.054482 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbf07016-68e0-44f2-8416-0d513af7ef27-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "dbf07016-68e0-44f2-8416-0d513af7ef27" (UID: "dbf07016-68e0-44f2-8416-0d513af7ef27"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.054706 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7047253b-c52c-40a1-a44c-f20c5e5fcdac-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.055909 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7047253b-c52c-40a1-a44c-f20c5e5fcdac-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.056226 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7047253b-c52c-40a1-a44c-f20c5e5fcdac-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.056832 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7047253b-c52c-40a1-a44c-f20c5e5fcdac-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0" Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.058658 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.058707 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/42fd47e8672fe7d6b13f07ce29753092206a1f26916e2372627bef7b9f87db14/globalmount\"" pod="openstack/rabbitmq-server-0"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.066364 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/dbf07016-68e0-44f2-8416-0d513af7ef27-pod-info" (OuterVolumeSpecName: "pod-info") pod "dbf07016-68e0-44f2-8416-0d513af7ef27" (UID: "dbf07016-68e0-44f2-8416-0d513af7ef27"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.067714 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7047253b-c52c-40a1-a44c-f20c5e5fcdac-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.069572 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbf07016-68e0-44f2-8416-0d513af7ef27-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "dbf07016-68e0-44f2-8416-0d513af7ef27" (UID: "dbf07016-68e0-44f2-8416-0d513af7ef27"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.073124 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7047253b-c52c-40a1-a44c-f20c5e5fcdac-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.076302 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7047253b-c52c-40a1-a44c-f20c5e5fcdac-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.086993 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987" (OuterVolumeSpecName: "persistence") pod "dbf07016-68e0-44f2-8416-0d513af7ef27" (UID: "dbf07016-68e0-44f2-8416-0d513af7ef27"). InnerVolumeSpecName "pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987". PluginName "kubernetes.io/csi", VolumeGidValue ""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.089126 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxg2g\" (UniqueName: \"kubernetes.io/projected/7047253b-c52c-40a1-a44c-f20c5e5fcdac-kube-api-access-wxg2g\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.089313 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbf07016-68e0-44f2-8416-0d513af7ef27-kube-api-access-j5g6r" (OuterVolumeSpecName: "kube-api-access-j5g6r") pod "dbf07016-68e0-44f2-8416-0d513af7ef27" (UID: "dbf07016-68e0-44f2-8416-0d513af7ef27"). InnerVolumeSpecName "kube-api-access-j5g6r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.102295 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fd61cc07-9699-4dc2-8a7a-500c82b24366\") pod \"rabbitmq-server-0\" (UID: \"7047253b-c52c-40a1-a44c-f20c5e5fcdac\") " pod="openstack/rabbitmq-server-0"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.106430 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbf07016-68e0-44f2-8416-0d513af7ef27-server-conf" (OuterVolumeSpecName: "server-conf") pod "dbf07016-68e0-44f2-8416-0d513af7ef27" (UID: "dbf07016-68e0-44f2-8416-0d513af7ef27"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.146038 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "dbf07016-68e0-44f2-8416-0d513af7ef27" (UID: "dbf07016-68e0-44f2-8416-0d513af7ef27"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.156280 4941 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dbf07016-68e0-44f2-8416-0d513af7ef27-plugins-conf\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.156315 4941 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dbf07016-68e0-44f2-8416-0d513af7ef27-server-conf\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.156699 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5g6r\" (UniqueName: \"kubernetes.io/projected/dbf07016-68e0-44f2-8416-0d513af7ef27-kube-api-access-j5g6r\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.156756 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\") on node \"crc\" "
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.156771 4941 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dbf07016-68e0-44f2-8416-0d513af7ef27-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.156785 4941 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dbf07016-68e0-44f2-8416-0d513af7ef27-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.156797 4941 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dbf07016-68e0-44f2-8416-0d513af7ef27-pod-info\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.175925 4941 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.176299 4941 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987") on node "crc"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.186467 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.258784 4941 reconciler_common.go:293] "Volume detached for volume \"pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.557921 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3360921e-c026-4bfc-bcc6-e29ccf765618" path="/var/lib/kubelet/pods/3360921e-c026-4bfc-bcc6-e29ccf765618/volumes"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.647649 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 30 07:58:19 crc kubenswrapper[4941]: W1130 07:58:19.653044 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7047253b_c52c_40a1_a44c_f20c5e5fcdac.slice/crio-92fd8a650a21b29ac1919c99a5d463c60f2db9afd4064e9a4958a539b77b65e0 WatchSource:0}: Error finding container 92fd8a650a21b29ac1919c99a5d463c60f2db9afd4064e9a4958a539b77b65e0: Status 404 returned error can't find the container with id 92fd8a650a21b29ac1919c99a5d463c60f2db9afd4064e9a4958a539b77b65e0
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.661509 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.785890 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7047253b-c52c-40a1-a44c-f20c5e5fcdac","Type":"ContainerStarted","Data":"92fd8a650a21b29ac1919c99a5d463c60f2db9afd4064e9a4958a539b77b65e0"}
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.786987 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bfb4449cc-mrwdv"]
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.787227 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" podUID="bc0c81bd-9a60-4038-b43f-3beef81d5600" containerName="dnsmasq-dns" containerID="cri-o://77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3" gracePeriod=10
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.789888 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"dbf07016-68e0-44f2-8416-0d513af7ef27","Type":"ContainerDied","Data":"11079b8b1c0bb3d073da708fcefce53f020808ca64f1808cc12fcbb4133809d7"}
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.789923 4941 scope.go:117] "RemoveContainer" containerID="74745d844d492be8ef34831863f289903a07472e6439d6990103bea3a1035588"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.790012 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.842249 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.846695 4941 scope.go:117] "RemoveContainer" containerID="eb2e132cf08c164859b147df3c343fa733d5c70ff30b2e720d50ec53352c217c"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.864385 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.868979 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:58:19 crc kubenswrapper[4941]: E1130 07:58:19.869428 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbf07016-68e0-44f2-8416-0d513af7ef27" containerName="rabbitmq"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.869449 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbf07016-68e0-44f2-8416-0d513af7ef27" containerName="rabbitmq"
Nov 30 07:58:19 crc kubenswrapper[4941]: E1130 07:58:19.869479 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbf07016-68e0-44f2-8416-0d513af7ef27" containerName="setup-container"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.869486 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbf07016-68e0-44f2-8416-0d513af7ef27" containerName="setup-container"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.869656 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbf07016-68e0-44f2-8416-0d513af7ef27" containerName="rabbitmq"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.870450 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.880555 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-b5jx6"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.880602 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.880567 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.880772 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.880830 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Nov 30 07:58:19 crc kubenswrapper[4941]: I1130 07:58:19.895090 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.069661 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/293fc425-5b05-42e2-81c5-22e843125a15-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.069903 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/293fc425-5b05-42e2-81c5-22e843125a15-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.069962 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/293fc425-5b05-42e2-81c5-22e843125a15-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.070037 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/293fc425-5b05-42e2-81c5-22e843125a15-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.070188 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.070246 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/293fc425-5b05-42e2-81c5-22e843125a15-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.070277 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dfzm\" (UniqueName: \"kubernetes.io/projected/293fc425-5b05-42e2-81c5-22e843125a15-kube-api-access-4dfzm\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.070309 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/293fc425-5b05-42e2-81c5-22e843125a15-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.070363 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/293fc425-5b05-42e2-81c5-22e843125a15-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.172169 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/293fc425-5b05-42e2-81c5-22e843125a15-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.172591 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/293fc425-5b05-42e2-81c5-22e843125a15-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.172648 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/293fc425-5b05-42e2-81c5-22e843125a15-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.172668 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/293fc425-5b05-42e2-81c5-22e843125a15-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.172694 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/293fc425-5b05-42e2-81c5-22e843125a15-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.172734 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.172756 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/293fc425-5b05-42e2-81c5-22e843125a15-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.172775 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dfzm\" (UniqueName: \"kubernetes.io/projected/293fc425-5b05-42e2-81c5-22e843125a15-kube-api-access-4dfzm\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.172794 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/293fc425-5b05-42e2-81c5-22e843125a15-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.173018 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/293fc425-5b05-42e2-81c5-22e843125a15-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.173873 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/293fc425-5b05-42e2-81c5-22e843125a15-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.173960 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/293fc425-5b05-42e2-81c5-22e843125a15-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.175051 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/293fc425-5b05-42e2-81c5-22e843125a15-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.184677 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.184724 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0c5320f62a12f1d79f3f647820e5ebb65794e614a225e753220423e3ea970a41/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.185651 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/293fc425-5b05-42e2-81c5-22e843125a15-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.188903 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/293fc425-5b05-42e2-81c5-22e843125a15-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.195629 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/293fc425-5b05-42e2-81c5-22e843125a15-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.254159 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a6a54f9-3ca6-4df9-9e1c-0b844d24b987\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.289074 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dfzm\" (UniqueName: \"kubernetes.io/projected/293fc425-5b05-42e2-81c5-22e843125a15-kube-api-access-4dfzm\") pod \"rabbitmq-cell1-server-0\" (UID: \"293fc425-5b05-42e2-81c5-22e843125a15\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.471629 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.503869 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.578254 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc0c81bd-9a60-4038-b43f-3beef81d5600-dns-svc\") pod \"bc0c81bd-9a60-4038-b43f-3beef81d5600\" (UID: \"bc0c81bd-9a60-4038-b43f-3beef81d5600\") "
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.578410 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g25b\" (UniqueName: \"kubernetes.io/projected/bc0c81bd-9a60-4038-b43f-3beef81d5600-kube-api-access-6g25b\") pod \"bc0c81bd-9a60-4038-b43f-3beef81d5600\" (UID: \"bc0c81bd-9a60-4038-b43f-3beef81d5600\") "
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.578449 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc0c81bd-9a60-4038-b43f-3beef81d5600-config\") pod \"bc0c81bd-9a60-4038-b43f-3beef81d5600\" (UID: \"bc0c81bd-9a60-4038-b43f-3beef81d5600\") "
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.688739 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc0c81bd-9a60-4038-b43f-3beef81d5600-kube-api-access-6g25b" (OuterVolumeSpecName: "kube-api-access-6g25b") pod "bc0c81bd-9a60-4038-b43f-3beef81d5600" (UID: "bc0c81bd-9a60-4038-b43f-3beef81d5600"). InnerVolumeSpecName "kube-api-access-6g25b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.784057 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g25b\" (UniqueName: \"kubernetes.io/projected/bc0c81bd-9a60-4038-b43f-3beef81d5600-kube-api-access-6g25b\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.821835 4941 generic.go:334] "Generic (PLEG): container finished" podID="bc0c81bd-9a60-4038-b43f-3beef81d5600" containerID="77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3" exitCode=0
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.821919 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" event={"ID":"bc0c81bd-9a60-4038-b43f-3beef81d5600","Type":"ContainerDied","Data":"77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3"}
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.821971 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" event={"ID":"bc0c81bd-9a60-4038-b43f-3beef81d5600","Type":"ContainerDied","Data":"0f829c354a18b93cb4d631b5c489eed6b9ea0a86f4b506e8f4fc1aaf2b8dd9fb"}
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.822012 4941 scope.go:117] "RemoveContainer" containerID="77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.822237 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv"
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.931141 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc0c81bd-9a60-4038-b43f-3beef81d5600-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bc0c81bd-9a60-4038-b43f-3beef81d5600" (UID: "bc0c81bd-9a60-4038-b43f-3beef81d5600"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:58:20 crc kubenswrapper[4941]: I1130 07:58:20.988891 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc0c81bd-9a60-4038-b43f-3beef81d5600-dns-svc\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.027542 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc0c81bd-9a60-4038-b43f-3beef81d5600-config" (OuterVolumeSpecName: "config") pod "bc0c81bd-9a60-4038-b43f-3beef81d5600" (UID: "bc0c81bd-9a60-4038-b43f-3beef81d5600"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.082012 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.087311 4941 scope.go:117] "RemoveContainer" containerID="9484ab13d1fbe7d19419098c0ac5a88afee6a0db42346b7d07aac16e41d4512c"
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.090276 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc0c81bd-9a60-4038-b43f-3beef81d5600-config\") on node \"crc\" DevicePath \"\""
Nov 30 07:58:21 crc kubenswrapper[4941]: W1130 07:58:21.093167 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod293fc425_5b05_42e2_81c5_22e843125a15.slice/crio-d660cbc282fb9c7e4a6b169bc6551bd4030616d0afcb6bdcc7f264e938a451b8 WatchSource:0}: Error finding container d660cbc282fb9c7e4a6b169bc6551bd4030616d0afcb6bdcc7f264e938a451b8: Status 404 returned error can't find the container with id d660cbc282fb9c7e4a6b169bc6551bd4030616d0afcb6bdcc7f264e938a451b8
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.162060 4941 scope.go:117] "RemoveContainer" containerID="77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3"
Nov 30 07:58:21 crc kubenswrapper[4941]: E1130 07:58:21.162890 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3\": container with ID starting with 77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3 not found: ID does not exist" containerID="77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3"
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.162945 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3"} err="failed to get container status \"77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3\": rpc error: code = NotFound desc = could not find container \"77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3\": container with ID starting with 77bf1923d280c4c9e2949bc41abf5c925e6b90546ac8690eff2aa5684e4d24b3 not found: ID does not exist"
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.162981 4941 scope.go:117] "RemoveContainer" containerID="9484ab13d1fbe7d19419098c0ac5a88afee6a0db42346b7d07aac16e41d4512c"
Nov 30 07:58:21 crc kubenswrapper[4941]: E1130 07:58:21.163578 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9484ab13d1fbe7d19419098c0ac5a88afee6a0db42346b7d07aac16e41d4512c\": container with ID starting with 9484ab13d1fbe7d19419098c0ac5a88afee6a0db42346b7d07aac16e41d4512c not found: ID does not exist" containerID="9484ab13d1fbe7d19419098c0ac5a88afee6a0db42346b7d07aac16e41d4512c"
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.163609 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9484ab13d1fbe7d19419098c0ac5a88afee6a0db42346b7d07aac16e41d4512c"} err="failed to get container status \"9484ab13d1fbe7d19419098c0ac5a88afee6a0db42346b7d07aac16e41d4512c\": rpc error: code = NotFound desc = could not find container \"9484ab13d1fbe7d19419098c0ac5a88afee6a0db42346b7d07aac16e41d4512c\": container with ID starting with 9484ab13d1fbe7d19419098c0ac5a88afee6a0db42346b7d07aac16e41d4512c not found: ID does not exist"
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.170295 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bfb4449cc-mrwdv"]
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.178922 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bfb4449cc-mrwdv"]
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.536212 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc0c81bd-9a60-4038-b43f-3beef81d5600" path="/var/lib/kubelet/pods/bc0c81bd-9a60-4038-b43f-3beef81d5600/volumes"
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.539501 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbf07016-68e0-44f2-8416-0d513af7ef27" path="/var/lib/kubelet/pods/dbf07016-68e0-44f2-8416-0d513af7ef27/volumes"
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.838231 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7047253b-c52c-40a1-a44c-f20c5e5fcdac","Type":"ContainerStarted","Data":"5d571a21d25b78f8d0c34e7e3654731464c4c35c06ff6ba2a6e0ccde3beb5f62"}
Nov 30 07:58:21 crc kubenswrapper[4941]: I1130 07:58:21.840579 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"293fc425-5b05-42e2-81c5-22e843125a15","Type":"ContainerStarted","Data":"d660cbc282fb9c7e4a6b169bc6551bd4030616d0afcb6bdcc7f264e938a451b8"}
Nov 30 07:58:23 crc kubenswrapper[4941]: I1130 07:58:23.866814 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"293fc425-5b05-42e2-81c5-22e843125a15","Type":"ContainerStarted","Data":"5f6da80afeb1e9b8a1c603f45f4727ad0f3f28de0bd4f9e3e4f1b79175b53cc4"}
Nov 30 07:58:25 crc kubenswrapper[4941]: I1130 07:58:25.078939 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7bfb4449cc-mrwdv" podUID="bc0c81bd-9a60-4038-b43f-3beef81d5600" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.231:5353: i/o timeout"
Nov 30 07:58:28 crc kubenswrapper[4941]: E1130 07:58:28.697936 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbf07016_68e0_44f2_8416_0d513af7ef27.slice/crio-11079b8b1c0bb3d073da708fcefce53f020808ca64f1808cc12fcbb4133809d7\": RecentStats: unable to find data in memory cache]"
Nov 30 07:58:32 crc kubenswrapper[4941]: I1130 07:58:32.978868 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 07:58:32 crc kubenswrapper[4941]: I1130 07:58:32.979835 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 07:58:38 crc kubenswrapper[4941]: E1130 07:58:38.932272 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbf07016_68e0_44f2_8416_0d513af7ef27.slice/crio-11079b8b1c0bb3d073da708fcefce53f020808ca64f1808cc12fcbb4133809d7\": RecentStats: unable to find data in memory cache]"
Nov 30 07:58:49 crc kubenswrapper[4941]: E1130 07:58:49.154951 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbf07016_68e0_44f2_8416_0d513af7ef27.slice/crio-11079b8b1c0bb3d073da708fcefce53f020808ca64f1808cc12fcbb4133809d7\": RecentStats: unable to find data in memory cache]"
Nov 30 07:58:55 crc kubenswrapper[4941]: I1130 07:58:55.162592 4941 generic.go:334] "Generic (PLEG): container finished" podID="7047253b-c52c-40a1-a44c-f20c5e5fcdac" containerID="5d571a21d25b78f8d0c34e7e3654731464c4c35c06ff6ba2a6e0ccde3beb5f62" exitCode=0
Nov 30 07:58:55 crc kubenswrapper[4941]: I1130 07:58:55.162732 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7047253b-c52c-40a1-a44c-f20c5e5fcdac","Type":"ContainerDied","Data":"5d571a21d25b78f8d0c34e7e3654731464c4c35c06ff6ba2a6e0ccde3beb5f62"}
Nov 30 07:58:56 crc kubenswrapper[4941]: I1130 07:58:56.172070 4941 generic.go:334] "Generic (PLEG): container finished" podID="293fc425-5b05-42e2-81c5-22e843125a15" containerID="5f6da80afeb1e9b8a1c603f45f4727ad0f3f28de0bd4f9e3e4f1b79175b53cc4" exitCode=0
Nov 30 07:58:56 crc kubenswrapper[4941]: I1130 07:58:56.172158 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"293fc425-5b05-42e2-81c5-22e843125a15","Type":"ContainerDied","Data":"5f6da80afeb1e9b8a1c603f45f4727ad0f3f28de0bd4f9e3e4f1b79175b53cc4"}
Nov 30 07:58:56 crc kubenswrapper[4941]: I1130 07:58:56.176659 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7047253b-c52c-40a1-a44c-f20c5e5fcdac","Type":"ContainerStarted","Data":"1a5025386fe335de7eddd7b46828007952a693774dd44f12ef361a43a793b069"}
Nov 30 07:58:56 crc kubenswrapper[4941]: I1130 07:58:56.176946 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Nov 30 07:58:56 crc kubenswrapper[4941]: I1130 07:58:56.250760 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.250739199 podStartE2EDuration="38.250739199s" podCreationTimestamp="2025-11-30 07:58:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:58:56.241779532 +0000 UTC m=+4357.009951141" watchObservedRunningTime="2025-11-30 07:58:56.250739199 +0000 UTC m=+4357.018910798"
Nov 30 07:58:57 crc kubenswrapper[4941]: I1130 07:58:57.188551 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"293fc425-5b05-42e2-81c5-22e843125a15","Type":"ContainerStarted","Data":"1c6bb977d98d19e8164a652c9fde6c69937761bdc08c2ae09f1fe204168d7262"}
Nov 30 07:58:57 crc kubenswrapper[4941]: I1130 07:58:57.231886 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.231865418 podStartE2EDuration="38.231865418s" podCreationTimestamp="2025-11-30 07:58:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:58:57.228259477 +0000 UTC m=+4357.996431096" watchObservedRunningTime="2025-11-30 07:58:57.231865418 +0000 UTC m=+4358.000037017"
Nov 30 07:58:59 crc kubenswrapper[4941]: E1130 07:58:59.347439 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbf07016_68e0_44f2_8416_0d513af7ef27.slice/crio-11079b8b1c0bb3d073da708fcefce53f020808ca64f1808cc12fcbb4133809d7\": RecentStats: unable to find data in memory cache]"
Nov 30 07:59:00 crc kubenswrapper[4941]: I1130 07:59:00.504243 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:59:02 crc kubenswrapper[4941]: I1130 07:59:02.979024 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 07:59:02 crc kubenswrapper[4941]: I1130 07:59:02.979482 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 07:59:02 crc kubenswrapper[4941]: I1130 07:59:02.979536 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 07:59:02 crc kubenswrapper[4941]: I1130 07:59:02.980094 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"355a16a1b52abc6a36991155e064942cfefe64c5bf70d9cbe30367f4a84ce847"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 07:59:02 crc kubenswrapper[4941]: I1130 07:59:02.980166 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://355a16a1b52abc6a36991155e064942cfefe64c5bf70d9cbe30367f4a84ce847" gracePeriod=600
Nov 30 07:59:03 crc kubenswrapper[4941]: I1130 07:59:03.246574 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="355a16a1b52abc6a36991155e064942cfefe64c5bf70d9cbe30367f4a84ce847" exitCode=0
Nov 30 07:59:03 crc kubenswrapper[4941]: I1130 07:59:03.246648 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"355a16a1b52abc6a36991155e064942cfefe64c5bf70d9cbe30367f4a84ce847"}
Nov 30 07:59:03 crc kubenswrapper[4941]: I1130 07:59:03.246722 4941 scope.go:117] "RemoveContainer" containerID="4cf7008c132d34d031e5a16008827dfaf2fcffaba317452150017e6d3d96cca7"
Nov 30 07:59:04 crc kubenswrapper[4941]: I1130 07:59:04.258384 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7"}
Nov 30 07:59:09 crc kubenswrapper[4941]: I1130 07:59:09.189578 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Nov 30 07:59:09 crc kubenswrapper[4941]: E1130 07:59:09.541685 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbf07016_68e0_44f2_8416_0d513af7ef27.slice/crio-11079b8b1c0bb3d073da708fcefce53f020808ca64f1808cc12fcbb4133809d7\": RecentStats: unable to find data in memory cache]"
Nov 30 07:59:10 crc kubenswrapper[4941]: I1130 07:59:10.512623 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Nov 30 07:59:17 crc kubenswrapper[4941]: I1130 07:59:17.273280 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"]
Nov 30 07:59:17 crc kubenswrapper[4941]: E1130 07:59:17.274713 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc0c81bd-9a60-4038-b43f-3beef81d5600" containerName="init"
Nov 30 07:59:17 crc kubenswrapper[4941]: I1130 07:59:17.274735 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc0c81bd-9a60-4038-b43f-3beef81d5600" containerName="init"
Nov 30 07:59:17 crc kubenswrapper[4941]: E1130 07:59:17.274757 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc0c81bd-9a60-4038-b43f-3beef81d5600" containerName="dnsmasq-dns"
Nov 30 07:59:17 crc kubenswrapper[4941]: I1130 07:59:17.274766 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc0c81bd-9a60-4038-b43f-3beef81d5600" containerName="dnsmasq-dns"
Nov 30 07:59:17 crc kubenswrapper[4941]: I1130 07:59:17.274966 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc0c81bd-9a60-4038-b43f-3beef81d5600" containerName="dnsmasq-dns"
Nov 30 07:59:17 crc kubenswrapper[4941]: I1130 07:59:17.278065 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default"
Nov 30 07:59:17 crc kubenswrapper[4941]: I1130 07:59:17.280287 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-59s4s"
Nov 30 07:59:17 crc kubenswrapper[4941]: I1130 07:59:17.289416 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"]
Nov 30 07:59:17 crc kubenswrapper[4941]: I1130 07:59:17.334793 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nb9nh\" (UniqueName: \"kubernetes.io/projected/75d59c21-2a3d-4105-9724-a11e534b4563-kube-api-access-nb9nh\") pod \"mariadb-client-1-default\" (UID: \"75d59c21-2a3d-4105-9724-a11e534b4563\") " pod="openstack/mariadb-client-1-default"
Nov 30 07:59:17 crc kubenswrapper[4941]: I1130 07:59:17.436706 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nb9nh\" (UniqueName: \"kubernetes.io/projected/75d59c21-2a3d-4105-9724-a11e534b4563-kube-api-access-nb9nh\") pod \"mariadb-client-1-default\" (UID: \"75d59c21-2a3d-4105-9724-a11e534b4563\") " pod="openstack/mariadb-client-1-default"
Nov 30 07:59:17 crc kubenswrapper[4941]: I1130 07:59:17.464212 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nb9nh\" (UniqueName: \"kubernetes.io/projected/75d59c21-2a3d-4105-9724-a11e534b4563-kube-api-access-nb9nh\") pod \"mariadb-client-1-default\" (UID: \"75d59c21-2a3d-4105-9724-a11e534b4563\") " pod="openstack/mariadb-client-1-default"
Nov 30 07:59:17 crc kubenswrapper[4941]: I1130 07:59:17.602784 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default"
Nov 30 07:59:18 crc kubenswrapper[4941]: I1130 07:59:18.198913 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"]
Nov 30 07:59:18 crc kubenswrapper[4941]: I1130 07:59:18.387444 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"75d59c21-2a3d-4105-9724-a11e534b4563","Type":"ContainerStarted","Data":"7628188ed97f737d91c9c18f49109f4eb72444dd342c8717e5163a93f384a031"}
Nov 30 07:59:19 crc kubenswrapper[4941]: I1130 07:59:19.398832 4941 generic.go:334] "Generic (PLEG): container finished" podID="75d59c21-2a3d-4105-9724-a11e534b4563" containerID="774a7ed7dbbc564b94d634d088218c81bd0bd1a86c37224c35ce648fa296d05a" exitCode=0
Nov 30 07:59:19 crc kubenswrapper[4941]: I1130 07:59:19.398911 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"75d59c21-2a3d-4105-9724-a11e534b4563","Type":"ContainerDied","Data":"774a7ed7dbbc564b94d634d088218c81bd0bd1a86c37224c35ce648fa296d05a"}
Nov 30 07:59:20 crc kubenswrapper[4941]: I1130 07:59:20.959004 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default"
Nov 30 07:59:20 crc kubenswrapper[4941]: I1130 07:59:20.997546 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_75d59c21-2a3d-4105-9724-a11e534b4563/mariadb-client-1-default/0.log"
Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.026826 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"]
Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.034966 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"]
Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.109163 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nb9nh\" (UniqueName: \"kubernetes.io/projected/75d59c21-2a3d-4105-9724-a11e534b4563-kube-api-access-nb9nh\") pod \"75d59c21-2a3d-4105-9724-a11e534b4563\" (UID: \"75d59c21-2a3d-4105-9724-a11e534b4563\") "
Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.125510 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75d59c21-2a3d-4105-9724-a11e534b4563-kube-api-access-nb9nh" (OuterVolumeSpecName: "kube-api-access-nb9nh") pod "75d59c21-2a3d-4105-9724-a11e534b4563" (UID: "75d59c21-2a3d-4105-9724-a11e534b4563"). InnerVolumeSpecName "kube-api-access-nb9nh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.211177 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nb9nh\" (UniqueName: \"kubernetes.io/projected/75d59c21-2a3d-4105-9724-a11e534b4563-kube-api-access-nb9nh\") on node \"crc\" DevicePath \"\""
Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.426587 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7628188ed97f737d91c9c18f49109f4eb72444dd342c8717e5163a93f384a031"
Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.426724 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default"
Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.532856 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75d59c21-2a3d-4105-9724-a11e534b4563" path="/var/lib/kubelet/pods/75d59c21-2a3d-4105-9724-a11e534b4563/volumes"
Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.585135 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"]
Nov 30 07:59:21 crc kubenswrapper[4941]: E1130 07:59:21.585498 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75d59c21-2a3d-4105-9724-a11e534b4563" containerName="mariadb-client-1-default"
Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.585511 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="75d59c21-2a3d-4105-9724-a11e534b4563" containerName="mariadb-client-1-default"
Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.585689 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="75d59c21-2a3d-4105-9724-a11e534b4563" containerName="mariadb-client-1-default"
Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.586218 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.591385 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-59s4s" Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.593994 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.719767 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnxn2\" (UniqueName: \"kubernetes.io/projected/2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac-kube-api-access-nnxn2\") pod \"mariadb-client-2-default\" (UID: \"2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac\") " pod="openstack/mariadb-client-2-default" Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.820796 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnxn2\" (UniqueName: \"kubernetes.io/projected/2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac-kube-api-access-nnxn2\") pod \"mariadb-client-2-default\" (UID: \"2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac\") " pod="openstack/mariadb-client-2-default" Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.839315 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnxn2\" (UniqueName: \"kubernetes.io/projected/2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac-kube-api-access-nnxn2\") pod \"mariadb-client-2-default\" (UID: \"2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac\") " pod="openstack/mariadb-client-2-default" Nov 30 07:59:21 crc kubenswrapper[4941]: I1130 07:59:21.919242 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Nov 30 07:59:22 crc kubenswrapper[4941]: I1130 07:59:22.482381 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Nov 30 07:59:22 crc kubenswrapper[4941]: W1130 07:59:22.496005 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ea6d2e4_32fd_44a3_b9a9_9f39a3c733ac.slice/crio-e6c8641a8bb596c15e489705b0e72ab512c4da5503fcec092be7571c83af53e2 WatchSource:0}: Error finding container e6c8641a8bb596c15e489705b0e72ab512c4da5503fcec092be7571c83af53e2: Status 404 returned error can't find the container with id e6c8641a8bb596c15e489705b0e72ab512c4da5503fcec092be7571c83af53e2 Nov 30 07:59:23 crc kubenswrapper[4941]: I1130 07:59:23.451939 4941 generic.go:334] "Generic (PLEG): container finished" podID="2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac" containerID="c7318f9d47e418dea615a8e2df0a409993737a1bca433947f932c592c23bb4ae" exitCode=1 Nov 30 07:59:23 crc kubenswrapper[4941]: I1130 07:59:23.452094 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac","Type":"ContainerDied","Data":"c7318f9d47e418dea615a8e2df0a409993737a1bca433947f932c592c23bb4ae"} Nov 30 07:59:23 crc kubenswrapper[4941]: I1130 07:59:23.452564 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac","Type":"ContainerStarted","Data":"e6c8641a8bb596c15e489705b0e72ab512c4da5503fcec092be7571c83af53e2"} Nov 30 07:59:24 crc kubenswrapper[4941]: I1130 07:59:24.946170 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Nov 30 07:59:24 crc kubenswrapper[4941]: I1130 07:59:24.977486 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2-default_2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac/mariadb-client-2-default/0.log" Nov 30 07:59:24 crc kubenswrapper[4941]: I1130 07:59:24.986246 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnxn2\" (UniqueName: \"kubernetes.io/projected/2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac-kube-api-access-nnxn2\") pod \"2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac\" (UID: \"2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac\") " Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.017550 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac-kube-api-access-nnxn2" (OuterVolumeSpecName: "kube-api-access-nnxn2") pod "2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac" (UID: "2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac"). InnerVolumeSpecName "kube-api-access-nnxn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.037399 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.043289 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.088607 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnxn2\" (UniqueName: \"kubernetes.io/projected/2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac-kube-api-access-nnxn2\") on node \"crc\" DevicePath \"\"" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.475768 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6c8641a8bb596c15e489705b0e72ab512c4da5503fcec092be7571c83af53e2" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.476205 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.534403 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac" path="/var/lib/kubelet/pods/2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac/volumes" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.597745 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Nov 30 07:59:25 crc kubenswrapper[4941]: E1130 07:59:25.598647 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac" containerName="mariadb-client-2-default" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.598696 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac" containerName="mariadb-client-2-default" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.599121 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ea6d2e4-32fd-44a3-b9a9-9f39a3c733ac" containerName="mariadb-client-2-default" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.600567 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.603991 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-59s4s" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.634820 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.700858 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzsng\" (UniqueName: \"kubernetes.io/projected/01285b12-55bf-4cd7-bc8f-d0fc35820faf-kube-api-access-lzsng\") pod \"mariadb-client-1\" (UID: \"01285b12-55bf-4cd7-bc8f-d0fc35820faf\") " pod="openstack/mariadb-client-1" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.803411 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzsng\" (UniqueName: \"kubernetes.io/projected/01285b12-55bf-4cd7-bc8f-d0fc35820faf-kube-api-access-lzsng\") pod \"mariadb-client-1\" (UID: \"01285b12-55bf-4cd7-bc8f-d0fc35820faf\") " pod="openstack/mariadb-client-1" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.832610 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzsng\" (UniqueName: \"kubernetes.io/projected/01285b12-55bf-4cd7-bc8f-d0fc35820faf-kube-api-access-lzsng\") pod \"mariadb-client-1\" (UID: \"01285b12-55bf-4cd7-bc8f-d0fc35820faf\") " pod="openstack/mariadb-client-1" Nov 30 07:59:25 crc kubenswrapper[4941]: I1130 07:59:25.919299 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Nov 30 07:59:26 crc kubenswrapper[4941]: I1130 07:59:26.332111 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Nov 30 07:59:26 crc kubenswrapper[4941]: I1130 07:59:26.483625 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"01285b12-55bf-4cd7-bc8f-d0fc35820faf","Type":"ContainerStarted","Data":"f5a496d0829c180a93491dda6df1655bd9a8c712d41bdc5272f91e25707d4b75"} Nov 30 07:59:27 crc kubenswrapper[4941]: I1130 07:59:27.494565 4941 generic.go:334] "Generic (PLEG): container finished" podID="01285b12-55bf-4cd7-bc8f-d0fc35820faf" containerID="5a55f8a0ca4152be72d920d5bc3c6ea5afe516745ef8a460bc1662bab6bb0195" exitCode=0 Nov 30 07:59:27 crc kubenswrapper[4941]: I1130 07:59:27.494727 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"01285b12-55bf-4cd7-bc8f-d0fc35820faf","Type":"ContainerDied","Data":"5a55f8a0ca4152be72d920d5bc3c6ea5afe516745ef8a460bc1662bab6bb0195"} Nov 30 07:59:28 crc kubenswrapper[4941]: I1130 07:59:28.870038 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Nov 30 07:59:28 crc kubenswrapper[4941]: I1130 07:59:28.894861 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_01285b12-55bf-4cd7-bc8f-d0fc35820faf/mariadb-client-1/0.log" Nov 30 07:59:28 crc kubenswrapper[4941]: I1130 07:59:28.967233 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Nov 30 07:59:28 crc kubenswrapper[4941]: I1130 07:59:28.974177 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzsng\" (UniqueName: \"kubernetes.io/projected/01285b12-55bf-4cd7-bc8f-d0fc35820faf-kube-api-access-lzsng\") pod \"01285b12-55bf-4cd7-bc8f-d0fc35820faf\" (UID: \"01285b12-55bf-4cd7-bc8f-d0fc35820faf\") " Nov 30 07:59:28 crc kubenswrapper[4941]: I1130 07:59:28.989717 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Nov 30 07:59:28 crc kubenswrapper[4941]: I1130 07:59:28.999579 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01285b12-55bf-4cd7-bc8f-d0fc35820faf-kube-api-access-lzsng" (OuterVolumeSpecName: "kube-api-access-lzsng") pod "01285b12-55bf-4cd7-bc8f-d0fc35820faf" (UID: "01285b12-55bf-4cd7-bc8f-d0fc35820faf"). InnerVolumeSpecName "kube-api-access-lzsng". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.076604 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzsng\" (UniqueName: \"kubernetes.io/projected/01285b12-55bf-4cd7-bc8f-d0fc35820faf-kube-api-access-lzsng\") on node \"crc\" DevicePath \"\"" Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.510746 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Nov 30 07:59:29 crc kubenswrapper[4941]: E1130 07:59:29.511384 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01285b12-55bf-4cd7-bc8f-d0fc35820faf" containerName="mariadb-client-1" Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.511410 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="01285b12-55bf-4cd7-bc8f-d0fc35820faf" containerName="mariadb-client-1" Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.511908 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="01285b12-55bf-4cd7-bc8f-d0fc35820faf" containerName="mariadb-client-1" Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.512956 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.515985 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5a496d0829c180a93491dda6df1655bd9a8c712d41bdc5272f91e25707d4b75" Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.516128 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.520174 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.542034 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01285b12-55bf-4cd7-bc8f-d0fc35820faf" path="/var/lib/kubelet/pods/01285b12-55bf-4cd7-bc8f-d0fc35820faf/volumes" Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.688979 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljvr8\" (UniqueName: \"kubernetes.io/projected/fc1663e6-e864-4b2f-a23c-52a19327d353-kube-api-access-ljvr8\") pod \"mariadb-client-4-default\" (UID: \"fc1663e6-e864-4b2f-a23c-52a19327d353\") " pod="openstack/mariadb-client-4-default" Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.791676 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljvr8\" (UniqueName: \"kubernetes.io/projected/fc1663e6-e864-4b2f-a23c-52a19327d353-kube-api-access-ljvr8\") pod \"mariadb-client-4-default\" (UID: \"fc1663e6-e864-4b2f-a23c-52a19327d353\") " pod="openstack/mariadb-client-4-default" Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.818380 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljvr8\" (UniqueName: \"kubernetes.io/projected/fc1663e6-e864-4b2f-a23c-52a19327d353-kube-api-access-ljvr8\") pod \"mariadb-client-4-default\" (UID: \"fc1663e6-e864-4b2f-a23c-52a19327d353\") " pod="openstack/mariadb-client-4-default" Nov 30 07:59:29 crc kubenswrapper[4941]: I1130 07:59:29.840895 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Nov 30 07:59:30 crc kubenswrapper[4941]: I1130 07:59:30.499692 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Nov 30 07:59:30 crc kubenswrapper[4941]: W1130 07:59:30.504220 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc1663e6_e864_4b2f_a23c_52a19327d353.slice/crio-64b6256d2a0e485c2cb0465ff4af9c7578ce535cb3cfcab447beed81e2ca6ebc WatchSource:0}: Error finding container 64b6256d2a0e485c2cb0465ff4af9c7578ce535cb3cfcab447beed81e2ca6ebc: Status 404 returned error can't find the container with id 64b6256d2a0e485c2cb0465ff4af9c7578ce535cb3cfcab447beed81e2ca6ebc Nov 30 07:59:30 crc kubenswrapper[4941]: I1130 07:59:30.528974 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"fc1663e6-e864-4b2f-a23c-52a19327d353","Type":"ContainerStarted","Data":"64b6256d2a0e485c2cb0465ff4af9c7578ce535cb3cfcab447beed81e2ca6ebc"} Nov 30 07:59:31 crc kubenswrapper[4941]: I1130 07:59:31.551396 4941 generic.go:334] "Generic (PLEG): container finished" podID="fc1663e6-e864-4b2f-a23c-52a19327d353" containerID="17f0f53c71700055e5abcb70f4da3f7fdc14fb4f0db1d4d5e2426c184452cabf" exitCode=0 Nov 30 07:59:31 crc kubenswrapper[4941]: I1130 07:59:31.551492 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"fc1663e6-e864-4b2f-a23c-52a19327d353","Type":"ContainerDied","Data":"17f0f53c71700055e5abcb70f4da3f7fdc14fb4f0db1d4d5e2426c184452cabf"} Nov 30 07:59:32 crc kubenswrapper[4941]: I1130 07:59:32.958621 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Nov 30 07:59:32 crc kubenswrapper[4941]: I1130 07:59:32.983084 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_fc1663e6-e864-4b2f-a23c-52a19327d353/mariadb-client-4-default/0.log" Nov 30 07:59:33 crc kubenswrapper[4941]: I1130 07:59:33.013794 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Nov 30 07:59:33 crc kubenswrapper[4941]: I1130 07:59:33.021841 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Nov 30 07:59:33 crc kubenswrapper[4941]: I1130 07:59:33.146958 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljvr8\" (UniqueName: \"kubernetes.io/projected/fc1663e6-e864-4b2f-a23c-52a19327d353-kube-api-access-ljvr8\") pod \"fc1663e6-e864-4b2f-a23c-52a19327d353\" (UID: \"fc1663e6-e864-4b2f-a23c-52a19327d353\") " Nov 30 07:59:33 crc kubenswrapper[4941]: I1130 07:59:33.153213 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc1663e6-e864-4b2f-a23c-52a19327d353-kube-api-access-ljvr8" (OuterVolumeSpecName: "kube-api-access-ljvr8") pod "fc1663e6-e864-4b2f-a23c-52a19327d353" (UID: "fc1663e6-e864-4b2f-a23c-52a19327d353"). InnerVolumeSpecName "kube-api-access-ljvr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:59:33 crc kubenswrapper[4941]: I1130 07:59:33.249081 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljvr8\" (UniqueName: \"kubernetes.io/projected/fc1663e6-e864-4b2f-a23c-52a19327d353-kube-api-access-ljvr8\") on node \"crc\" DevicePath \"\"" Nov 30 07:59:33 crc kubenswrapper[4941]: I1130 07:59:33.529112 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc1663e6-e864-4b2f-a23c-52a19327d353" path="/var/lib/kubelet/pods/fc1663e6-e864-4b2f-a23c-52a19327d353/volumes" Nov 30 07:59:33 crc kubenswrapper[4941]: I1130 07:59:33.567404 4941 scope.go:117] "RemoveContainer" containerID="17f0f53c71700055e5abcb70f4da3f7fdc14fb4f0db1d4d5e2426c184452cabf" Nov 30 07:59:33 crc kubenswrapper[4941]: I1130 07:59:33.567422 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Nov 30 07:59:36 crc kubenswrapper[4941]: I1130 07:59:36.444011 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Nov 30 07:59:36 crc kubenswrapper[4941]: E1130 07:59:36.445248 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc1663e6-e864-4b2f-a23c-52a19327d353" containerName="mariadb-client-4-default" Nov 30 07:59:36 crc kubenswrapper[4941]: I1130 07:59:36.445283 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc1663e6-e864-4b2f-a23c-52a19327d353" containerName="mariadb-client-4-default" Nov 30 07:59:36 crc kubenswrapper[4941]: I1130 07:59:36.445750 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc1663e6-e864-4b2f-a23c-52a19327d353" containerName="mariadb-client-4-default" Nov 30 07:59:36 crc kubenswrapper[4941]: I1130 07:59:36.446690 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Nov 30 07:59:36 crc kubenswrapper[4941]: I1130 07:59:36.451858 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-59s4s" Nov 30 07:59:36 crc kubenswrapper[4941]: I1130 07:59:36.458924 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Nov 30 07:59:36 crc kubenswrapper[4941]: I1130 07:59:36.597998 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xblgh\" (UniqueName: \"kubernetes.io/projected/85c12187-bd89-43b1-b283-dfa4ac5596c5-kube-api-access-xblgh\") pod \"mariadb-client-5-default\" (UID: \"85c12187-bd89-43b1-b283-dfa4ac5596c5\") " pod="openstack/mariadb-client-5-default" Nov 30 07:59:36 crc kubenswrapper[4941]: I1130 07:59:36.699946 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xblgh\" (UniqueName: \"kubernetes.io/projected/85c12187-bd89-43b1-b283-dfa4ac5596c5-kube-api-access-xblgh\") pod \"mariadb-client-5-default\" (UID: \"85c12187-bd89-43b1-b283-dfa4ac5596c5\") " pod="openstack/mariadb-client-5-default" Nov 30 07:59:36 crc kubenswrapper[4941]: I1130 07:59:36.721131 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xblgh\" (UniqueName: \"kubernetes.io/projected/85c12187-bd89-43b1-b283-dfa4ac5596c5-kube-api-access-xblgh\") pod \"mariadb-client-5-default\" (UID: \"85c12187-bd89-43b1-b283-dfa4ac5596c5\") " pod="openstack/mariadb-client-5-default" Nov 30 07:59:36 crc kubenswrapper[4941]: I1130 07:59:36.785672 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Nov 30 07:59:37 crc kubenswrapper[4941]: I1130 07:59:37.127585 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Nov 30 07:59:37 crc kubenswrapper[4941]: I1130 07:59:37.599152 4941 generic.go:334] "Generic (PLEG): container finished" podID="85c12187-bd89-43b1-b283-dfa4ac5596c5" containerID="7a0f077f95025cb44c81b255691b35c5026d4052a4f58988b16cd7eb61c9c20c" exitCode=0 Nov 30 07:59:37 crc kubenswrapper[4941]: I1130 07:59:37.599197 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"85c12187-bd89-43b1-b283-dfa4ac5596c5","Type":"ContainerDied","Data":"7a0f077f95025cb44c81b255691b35c5026d4052a4f58988b16cd7eb61c9c20c"} Nov 30 07:59:37 crc kubenswrapper[4941]: I1130 07:59:37.599224 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"85c12187-bd89-43b1-b283-dfa4ac5596c5","Type":"ContainerStarted","Data":"597dc633e5652c3f26b397048a3d72f8b6bb4d278a4112e056c9e70fb5dcaf38"} Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.050925 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.075235 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_85c12187-bd89-43b1-b283-dfa4ac5596c5/mariadb-client-5-default/0.log" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.100600 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.107785 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.239471 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xblgh\" (UniqueName: \"kubernetes.io/projected/85c12187-bd89-43b1-b283-dfa4ac5596c5-kube-api-access-xblgh\") pod \"85c12187-bd89-43b1-b283-dfa4ac5596c5\" (UID: \"85c12187-bd89-43b1-b283-dfa4ac5596c5\") " Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.247427 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85c12187-bd89-43b1-b283-dfa4ac5596c5-kube-api-access-xblgh" (OuterVolumeSpecName: "kube-api-access-xblgh") pod "85c12187-bd89-43b1-b283-dfa4ac5596c5" (UID: "85c12187-bd89-43b1-b283-dfa4ac5596c5"). InnerVolumeSpecName "kube-api-access-xblgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.276758 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Nov 30 07:59:39 crc kubenswrapper[4941]: E1130 07:59:39.277191 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85c12187-bd89-43b1-b283-dfa4ac5596c5" containerName="mariadb-client-5-default" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.277215 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="85c12187-bd89-43b1-b283-dfa4ac5596c5" containerName="mariadb-client-5-default" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.277448 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="85c12187-bd89-43b1-b283-dfa4ac5596c5" containerName="mariadb-client-5-default" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.278185 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.292659 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.342029 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnkzk\" (UniqueName: \"kubernetes.io/projected/55fa4f2a-d221-44b0-959e-97ba3c9625b2-kube-api-access-dnkzk\") pod \"mariadb-client-6-default\" (UID: \"55fa4f2a-d221-44b0-959e-97ba3c9625b2\") " pod="openstack/mariadb-client-6-default" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.342225 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xblgh\" (UniqueName: \"kubernetes.io/projected/85c12187-bd89-43b1-b283-dfa4ac5596c5-kube-api-access-xblgh\") on node \"crc\" DevicePath \"\"" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.443494 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnkzk\" (UniqueName: \"kubernetes.io/projected/55fa4f2a-d221-44b0-959e-97ba3c9625b2-kube-api-access-dnkzk\") pod \"mariadb-client-6-default\" (UID: \"55fa4f2a-d221-44b0-959e-97ba3c9625b2\") " pod="openstack/mariadb-client-6-default" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.466770 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnkzk\" (UniqueName: \"kubernetes.io/projected/55fa4f2a-d221-44b0-959e-97ba3c9625b2-kube-api-access-dnkzk\") pod \"mariadb-client-6-default\" (UID: \"55fa4f2a-d221-44b0-959e-97ba3c9625b2\") " pod="openstack/mariadb-client-6-default" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.533017 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85c12187-bd89-43b1-b283-dfa4ac5596c5" path="/var/lib/kubelet/pods/85c12187-bd89-43b1-b283-dfa4ac5596c5/volumes" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.615509 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.617927 4941 scope.go:117] "RemoveContainer" containerID="7a0f077f95025cb44c81b255691b35c5026d4052a4f58988b16cd7eb61c9c20c" Nov 30 07:59:39 crc kubenswrapper[4941]: I1130 07:59:39.617955 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Nov 30 07:59:40 crc kubenswrapper[4941]: I1130 07:59:40.125892 4941 scope.go:117] "RemoveContainer" containerID="b679814310c0d8bb482470a68c37d2b4050f27f4970c86a1d4d51ae7b76153be" Nov 30 07:59:40 crc kubenswrapper[4941]: I1130 07:59:40.168700 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Nov 30 07:59:40 crc kubenswrapper[4941]: I1130 07:59:40.628754 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"55fa4f2a-d221-44b0-959e-97ba3c9625b2","Type":"ContainerStarted","Data":"5633d69fbb3392894a701318a148c8fafad108032963250efc86dde5a76f5a04"} Nov 30 07:59:40 crc kubenswrapper[4941]: I1130 07:59:40.629173 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"55fa4f2a-d221-44b0-959e-97ba3c9625b2","Type":"ContainerStarted","Data":"b90c4b5d7e28f413c115a3be1fbb37bd93fcb1e686bb49e33112bbe219b5ce0f"} Nov 30 07:59:40 crc kubenswrapper[4941]: I1130 07:59:40.650517 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-6-default" podStartSLOduration=1.650468226 podStartE2EDuration="1.650468226s" podCreationTimestamp="2025-11-30 07:59:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 07:59:40.645549854 +0000 UTC m=+4401.413721463" watchObservedRunningTime="2025-11-30 07:59:40.650468226 +0000 UTC m=+4401.418639855" Nov 30 07:59:41 crc kubenswrapper[4941]: I1130 07:59:41.641202 4941 generic.go:334] "Generic (PLEG): container finished" podID="55fa4f2a-d221-44b0-959e-97ba3c9625b2" containerID="5633d69fbb3392894a701318a148c8fafad108032963250efc86dde5a76f5a04" exitCode=1 Nov 30 07:59:41 crc kubenswrapper[4941]: I1130 07:59:41.641305 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"55fa4f2a-d221-44b0-959e-97ba3c9625b2","Type":"ContainerDied","Data":"5633d69fbb3392894a701318a148c8fafad108032963250efc86dde5a76f5a04"} Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.094857 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.145762 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.155125 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.218356 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnkzk\" (UniqueName: \"kubernetes.io/projected/55fa4f2a-d221-44b0-959e-97ba3c9625b2-kube-api-access-dnkzk\") pod \"55fa4f2a-d221-44b0-959e-97ba3c9625b2\" (UID: \"55fa4f2a-d221-44b0-959e-97ba3c9625b2\") " Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.226378 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55fa4f2a-d221-44b0-959e-97ba3c9625b2-kube-api-access-dnkzk" (OuterVolumeSpecName: "kube-api-access-dnkzk") pod "55fa4f2a-d221-44b0-959e-97ba3c9625b2" (UID: "55fa4f2a-d221-44b0-959e-97ba3c9625b2"). InnerVolumeSpecName "kube-api-access-dnkzk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.315300 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Nov 30 07:59:43 crc kubenswrapper[4941]: E1130 07:59:43.315851 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55fa4f2a-d221-44b0-959e-97ba3c9625b2" containerName="mariadb-client-6-default" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.315865 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="55fa4f2a-d221-44b0-959e-97ba3c9625b2" containerName="mariadb-client-6-default" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.316019 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="55fa4f2a-d221-44b0-959e-97ba3c9625b2" containerName="mariadb-client-6-default" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.316547 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.321043 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnkzk\" (UniqueName: \"kubernetes.io/projected/55fa4f2a-d221-44b0-959e-97ba3c9625b2-kube-api-access-dnkzk\") on node \"crc\" DevicePath \"\"" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.330303 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.423014 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9rvt\" (UniqueName: \"kubernetes.io/projected/2986c9db-8f41-450f-890b-6e08a5f9445b-kube-api-access-f9rvt\") pod \"mariadb-client-7-default\" (UID: \"2986c9db-8f41-450f-890b-6e08a5f9445b\") " pod="openstack/mariadb-client-7-default" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.525441 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9rvt\" (UniqueName: \"kubernetes.io/projected/2986c9db-8f41-450f-890b-6e08a5f9445b-kube-api-access-f9rvt\") pod \"mariadb-client-7-default\" (UID: \"2986c9db-8f41-450f-890b-6e08a5f9445b\") " pod="openstack/mariadb-client-7-default" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.539595 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55fa4f2a-d221-44b0-959e-97ba3c9625b2" path="/var/lib/kubelet/pods/55fa4f2a-d221-44b0-959e-97ba3c9625b2/volumes" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.556687 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9rvt\" (UniqueName: \"kubernetes.io/projected/2986c9db-8f41-450f-890b-6e08a5f9445b-kube-api-access-f9rvt\") pod \"mariadb-client-7-default\" (UID: \"2986c9db-8f41-450f-890b-6e08a5f9445b\") " pod="openstack/mariadb-client-7-default" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.642746 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.680251 4941 scope.go:117] "RemoveContainer" containerID="5633d69fbb3392894a701318a148c8fafad108032963250efc86dde5a76f5a04" Nov 30 07:59:43 crc kubenswrapper[4941]: I1130 07:59:43.680452 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Nov 30 07:59:45 crc kubenswrapper[4941]: I1130 07:59:45.012581 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Nov 30 07:59:45 crc kubenswrapper[4941]: I1130 07:59:45.712842 4941 generic.go:334] "Generic (PLEG): container finished" podID="2986c9db-8f41-450f-890b-6e08a5f9445b" containerID="086ff21d8dbcd31d4d3049fd543ced42221c43b50c641963afca8519c649c103" exitCode=0 Nov 30 07:59:45 crc kubenswrapper[4941]: I1130 07:59:45.713018 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"2986c9db-8f41-450f-890b-6e08a5f9445b","Type":"ContainerDied","Data":"086ff21d8dbcd31d4d3049fd543ced42221c43b50c641963afca8519c649c103"} Nov 30 07:59:45 crc kubenswrapper[4941]: I1130 07:59:45.713471 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"2986c9db-8f41-450f-890b-6e08a5f9445b","Type":"ContainerStarted","Data":"4237995522cf906aaa206d7db9e76ee8ba0351a4c7583922f1ae99441d377554"} Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.075296 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.101417 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_2986c9db-8f41-450f-890b-6e08a5f9445b/mariadb-client-7-default/0.log" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.130718 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.138961 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.203186 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9rvt\" (UniqueName: \"kubernetes.io/projected/2986c9db-8f41-450f-890b-6e08a5f9445b-kube-api-access-f9rvt\") pod \"2986c9db-8f41-450f-890b-6e08a5f9445b\" (UID: \"2986c9db-8f41-450f-890b-6e08a5f9445b\") " Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.209157 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2986c9db-8f41-450f-890b-6e08a5f9445b-kube-api-access-f9rvt" (OuterVolumeSpecName: "kube-api-access-f9rvt") pod "2986c9db-8f41-450f-890b-6e08a5f9445b" (UID: "2986c9db-8f41-450f-890b-6e08a5f9445b"). InnerVolumeSpecName "kube-api-access-f9rvt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.306494 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9rvt\" (UniqueName: \"kubernetes.io/projected/2986c9db-8f41-450f-890b-6e08a5f9445b-kube-api-access-f9rvt\") on node \"crc\" DevicePath \"\"" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.329842 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Nov 30 07:59:47 crc kubenswrapper[4941]: E1130 07:59:47.330693 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2986c9db-8f41-450f-890b-6e08a5f9445b" containerName="mariadb-client-7-default" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.330734 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2986c9db-8f41-450f-890b-6e08a5f9445b" containerName="mariadb-client-7-default" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.331106 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2986c9db-8f41-450f-890b-6e08a5f9445b" containerName="mariadb-client-7-default" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.332281 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.337936 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.409576 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qbc2\" (UniqueName: \"kubernetes.io/projected/fc85dcdf-7baa-48a2-be81-a95df10b680c-kube-api-access-4qbc2\") pod \"mariadb-client-2\" (UID: \"fc85dcdf-7baa-48a2-be81-a95df10b680c\") " pod="openstack/mariadb-client-2" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.511629 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qbc2\" (UniqueName: \"kubernetes.io/projected/fc85dcdf-7baa-48a2-be81-a95df10b680c-kube-api-access-4qbc2\") pod \"mariadb-client-2\" (UID: \"fc85dcdf-7baa-48a2-be81-a95df10b680c\") " pod="openstack/mariadb-client-2" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.533048 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2986c9db-8f41-450f-890b-6e08a5f9445b" path="/var/lib/kubelet/pods/2986c9db-8f41-450f-890b-6e08a5f9445b/volumes" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.538260 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qbc2\" (UniqueName: \"kubernetes.io/projected/fc85dcdf-7baa-48a2-be81-a95df10b680c-kube-api-access-4qbc2\") pod \"mariadb-client-2\" (UID: \"fc85dcdf-7baa-48a2-be81-a95df10b680c\") " pod="openstack/mariadb-client-2" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.657443 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.731114 4941 scope.go:117] "RemoveContainer" containerID="086ff21d8dbcd31d4d3049fd543ced42221c43b50c641963afca8519c649c103" Nov 30 07:59:47 crc kubenswrapper[4941]: I1130 07:59:47.731196 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 30 07:59:48 crc kubenswrapper[4941]: I1130 07:59:48.041132 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Nov 30 07:59:48 crc kubenswrapper[4941]: I1130 07:59:48.769117 4941 generic.go:334] "Generic (PLEG): container finished" podID="fc85dcdf-7baa-48a2-be81-a95df10b680c" containerID="3b5417c20d1f573fa9266030863c68c6c5ba70c7b0eddf3c488dc9c96c6cc436" exitCode=0 Nov 30 07:59:48 crc kubenswrapper[4941]: I1130 07:59:48.769760 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"fc85dcdf-7baa-48a2-be81-a95df10b680c","Type":"ContainerDied","Data":"3b5417c20d1f573fa9266030863c68c6c5ba70c7b0eddf3c488dc9c96c6cc436"} Nov 30 07:59:48 crc kubenswrapper[4941]: I1130 07:59:48.769811 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"fc85dcdf-7baa-48a2-be81-a95df10b680c","Type":"ContainerStarted","Data":"4e1b406d0c359e2a7944f70aa9f569ecfeb4d0282886dc39ce3d8c2e72293b1a"} Nov 30 07:59:50 crc kubenswrapper[4941]: I1130 07:59:50.659263 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Nov 30 07:59:50 crc kubenswrapper[4941]: I1130 07:59:50.687986 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_fc85dcdf-7baa-48a2-be81-a95df10b680c/mariadb-client-2/0.log" Nov 30 07:59:50 crc kubenswrapper[4941]: I1130 07:59:50.719828 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Nov 30 07:59:50 crc kubenswrapper[4941]: I1130 07:59:50.726704 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Nov 30 07:59:50 crc kubenswrapper[4941]: I1130 07:59:50.773269 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qbc2\" (UniqueName: \"kubernetes.io/projected/fc85dcdf-7baa-48a2-be81-a95df10b680c-kube-api-access-4qbc2\") pod \"fc85dcdf-7baa-48a2-be81-a95df10b680c\" (UID: \"fc85dcdf-7baa-48a2-be81-a95df10b680c\") " Nov 30 07:59:50 crc kubenswrapper[4941]: I1130 07:59:50.780674 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc85dcdf-7baa-48a2-be81-a95df10b680c-kube-api-access-4qbc2" (OuterVolumeSpecName: "kube-api-access-4qbc2") pod "fc85dcdf-7baa-48a2-be81-a95df10b680c" (UID: "fc85dcdf-7baa-48a2-be81-a95df10b680c"). InnerVolumeSpecName "kube-api-access-4qbc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 07:59:50 crc kubenswrapper[4941]: I1130 07:59:50.794431 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e1b406d0c359e2a7944f70aa9f569ecfeb4d0282886dc39ce3d8c2e72293b1a" Nov 30 07:59:50 crc kubenswrapper[4941]: I1130 07:59:50.794499 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Nov 30 07:59:50 crc kubenswrapper[4941]: I1130 07:59:50.875776 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qbc2\" (UniqueName: \"kubernetes.io/projected/fc85dcdf-7baa-48a2-be81-a95df10b680c-kube-api-access-4qbc2\") on node \"crc\" DevicePath \"\"" Nov 30 07:59:51 crc kubenswrapper[4941]: I1130 07:59:51.538540 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc85dcdf-7baa-48a2-be81-a95df10b680c" path="/var/lib/kubelet/pods/fc85dcdf-7baa-48a2-be81-a95df10b680c/volumes" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.181008 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg"] Nov 30 08:00:00 crc kubenswrapper[4941]: E1130 08:00:00.182285 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc85dcdf-7baa-48a2-be81-a95df10b680c" containerName="mariadb-client-2" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.182337 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc85dcdf-7baa-48a2-be81-a95df10b680c" containerName="mariadb-client-2" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.182805 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc85dcdf-7baa-48a2-be81-a95df10b680c" containerName="mariadb-client-2" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.183616 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.186388 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.187421 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.190946 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg"] Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.263713 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/616a8168-57aa-4b46-b33d-9c09e769c0e4-secret-volume\") pod \"collect-profiles-29408160-tcnkg\" (UID: \"616a8168-57aa-4b46-b33d-9c09e769c0e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.263823 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56bp9\" (UniqueName: \"kubernetes.io/projected/616a8168-57aa-4b46-b33d-9c09e769c0e4-kube-api-access-56bp9\") pod \"collect-profiles-29408160-tcnkg\" (UID: \"616a8168-57aa-4b46-b33d-9c09e769c0e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.263850 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/616a8168-57aa-4b46-b33d-9c09e769c0e4-config-volume\") pod \"collect-profiles-29408160-tcnkg\" (UID: \"616a8168-57aa-4b46-b33d-9c09e769c0e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" Nov 30 08:00:00 crc 
kubenswrapper[4941]: I1130 08:00:00.365560 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56bp9\" (UniqueName: \"kubernetes.io/projected/616a8168-57aa-4b46-b33d-9c09e769c0e4-kube-api-access-56bp9\") pod \"collect-profiles-29408160-tcnkg\" (UID: \"616a8168-57aa-4b46-b33d-9c09e769c0e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.365625 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/616a8168-57aa-4b46-b33d-9c09e769c0e4-config-volume\") pod \"collect-profiles-29408160-tcnkg\" (UID: \"616a8168-57aa-4b46-b33d-9c09e769c0e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.365691 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/616a8168-57aa-4b46-b33d-9c09e769c0e4-secret-volume\") pod \"collect-profiles-29408160-tcnkg\" (UID: \"616a8168-57aa-4b46-b33d-9c09e769c0e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.367290 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/616a8168-57aa-4b46-b33d-9c09e769c0e4-config-volume\") pod \"collect-profiles-29408160-tcnkg\" (UID: \"616a8168-57aa-4b46-b33d-9c09e769c0e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.376826 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/616a8168-57aa-4b46-b33d-9c09e769c0e4-secret-volume\") pod \"collect-profiles-29408160-tcnkg\" (UID: \"616a8168-57aa-4b46-b33d-9c09e769c0e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.383899 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56bp9\" (UniqueName: \"kubernetes.io/projected/616a8168-57aa-4b46-b33d-9c09e769c0e4-kube-api-access-56bp9\") pod \"collect-profiles-29408160-tcnkg\" (UID: \"616a8168-57aa-4b46-b33d-9c09e769c0e4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" Nov 30 08:00:00 crc kubenswrapper[4941]: I1130 08:00:00.510136 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.012834 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg"] Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.482460 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4qgv6"] Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.486029 4941 util.go:30] "No sandbox for pod can be found. 
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.503570 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4qgv6"]
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.593575 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-utilities\") pod \"certified-operators-4qgv6\" (UID: \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\") " pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.593789 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-catalog-content\") pod \"certified-operators-4qgv6\" (UID: \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\") " pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.593845 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntlzs\" (UniqueName: \"kubernetes.io/projected/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-kube-api-access-ntlzs\") pod \"certified-operators-4qgv6\" (UID: \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\") " pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.695920 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-catalog-content\") pod \"certified-operators-4qgv6\" (UID: \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\") " pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.695981 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntlzs\" (UniqueName: \"kubernetes.io/projected/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-kube-api-access-ntlzs\") pod \"certified-operators-4qgv6\" (UID: \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\") " pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.696019 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-utilities\") pod \"certified-operators-4qgv6\" (UID: \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\") " pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.696428 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-catalog-content\") pod \"certified-operators-4qgv6\" (UID: \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\") " pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.696477 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-utilities\") pod \"certified-operators-4qgv6\" (UID: \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\") " pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.720894 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntlzs\" (UniqueName: \"kubernetes.io/projected/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-kube-api-access-ntlzs\") pod \"certified-operators-4qgv6\" (UID: \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\") " pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.843200 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.901812 4941 generic.go:334] "Generic (PLEG): container finished" podID="616a8168-57aa-4b46-b33d-9c09e769c0e4" containerID="4ab5613d7584bc7779115f37e6d3e25ee552a5e462b8d63120eb728fa7bbec88" exitCode=0
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.901894 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" event={"ID":"616a8168-57aa-4b46-b33d-9c09e769c0e4","Type":"ContainerDied","Data":"4ab5613d7584bc7779115f37e6d3e25ee552a5e462b8d63120eb728fa7bbec88"}
Nov 30 08:00:01 crc kubenswrapper[4941]: I1130 08:00:01.901940 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" event={"ID":"616a8168-57aa-4b46-b33d-9c09e769c0e4","Type":"ContainerStarted","Data":"71f96a3bec3c2693ab5648e33c90f738526c4d935b63fe93e7d7a2f75c53dabf"}
Nov 30 08:00:02 crc kubenswrapper[4941]: I1130 08:00:02.366007 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4qgv6"]
Nov 30 08:00:02 crc kubenswrapper[4941]: I1130 08:00:02.916964 4941 generic.go:334] "Generic (PLEG): container finished" podID="e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" containerID="3878726a29132a7b5dea9179f2f081ae37ab861bdbaea72fc8137ad323f42c60" exitCode=0
Nov 30 08:00:02 crc kubenswrapper[4941]: I1130 08:00:02.919282 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qgv6" event={"ID":"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068","Type":"ContainerDied","Data":"3878726a29132a7b5dea9179f2f081ae37ab861bdbaea72fc8137ad323f42c60"}
Nov 30 08:00:02 crc kubenswrapper[4941]: I1130 08:00:02.919394 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qgv6" event={"ID":"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068","Type":"ContainerStarted","Data":"0e3386ddcf340c59864fbe805b0ecfb4255d47aba0f2ad11fe1c7c016a9c1ca3"}
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.557527 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg"
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.643104 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/616a8168-57aa-4b46-b33d-9c09e769c0e4-secret-volume\") pod \"616a8168-57aa-4b46-b33d-9c09e769c0e4\" (UID: \"616a8168-57aa-4b46-b33d-9c09e769c0e4\") "
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.643162 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56bp9\" (UniqueName: \"kubernetes.io/projected/616a8168-57aa-4b46-b33d-9c09e769c0e4-kube-api-access-56bp9\") pod \"616a8168-57aa-4b46-b33d-9c09e769c0e4\" (UID: \"616a8168-57aa-4b46-b33d-9c09e769c0e4\") "
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.643195 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/616a8168-57aa-4b46-b33d-9c09e769c0e4-config-volume\") pod \"616a8168-57aa-4b46-b33d-9c09e769c0e4\" (UID: \"616a8168-57aa-4b46-b33d-9c09e769c0e4\") "
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.645492 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/616a8168-57aa-4b46-b33d-9c09e769c0e4-config-volume" (OuterVolumeSpecName: "config-volume") pod "616a8168-57aa-4b46-b33d-9c09e769c0e4" (UID: "616a8168-57aa-4b46-b33d-9c09e769c0e4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.651580 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/616a8168-57aa-4b46-b33d-9c09e769c0e4-kube-api-access-56bp9" (OuterVolumeSpecName: "kube-api-access-56bp9") pod "616a8168-57aa-4b46-b33d-9c09e769c0e4" (UID: "616a8168-57aa-4b46-b33d-9c09e769c0e4"). InnerVolumeSpecName "kube-api-access-56bp9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.654432 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/616a8168-57aa-4b46-b33d-9c09e769c0e4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "616a8168-57aa-4b46-b33d-9c09e769c0e4" (UID: "616a8168-57aa-4b46-b33d-9c09e769c0e4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.744975 4941 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/616a8168-57aa-4b46-b33d-9c09e769c0e4-secret-volume\") on node \"crc\" DevicePath \"\""
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.745019 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56bp9\" (UniqueName: \"kubernetes.io/projected/616a8168-57aa-4b46-b33d-9c09e769c0e4-kube-api-access-56bp9\") on node \"crc\" DevicePath \"\""
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.745029 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/616a8168-57aa-4b46-b33d-9c09e769c0e4-config-volume\") on node \"crc\" DevicePath \"\""
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.928739 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg" event={"ID":"616a8168-57aa-4b46-b33d-9c09e769c0e4","Type":"ContainerDied","Data":"71f96a3bec3c2693ab5648e33c90f738526c4d935b63fe93e7d7a2f75c53dabf"}
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.929367 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71f96a3bec3c2693ab5648e33c90f738526c4d935b63fe93e7d7a2f75c53dabf"
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.928783 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg"
Nov 30 08:00:03 crc kubenswrapper[4941]: I1130 08:00:03.930912 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qgv6" event={"ID":"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068","Type":"ContainerStarted","Data":"b9c441155fbd5da2d6c15e08d049cc4f2560ca10222353118fe9962a9236faa6"}
Nov 30 08:00:04 crc kubenswrapper[4941]: I1130 08:00:04.691770 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd"]
Nov 30 08:00:04 crc kubenswrapper[4941]: I1130 08:00:04.699143 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408115-lntnd"]
Nov 30 08:00:04 crc kubenswrapper[4941]: I1130 08:00:04.940987 4941 generic.go:334] "Generic (PLEG): container finished" podID="e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" containerID="b9c441155fbd5da2d6c15e08d049cc4f2560ca10222353118fe9962a9236faa6" exitCode=0
Nov 30 08:00:04 crc kubenswrapper[4941]: I1130 08:00:04.941047 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qgv6" event={"ID":"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068","Type":"ContainerDied","Data":"b9c441155fbd5da2d6c15e08d049cc4f2560ca10222353118fe9962a9236faa6"}
Nov 30 08:00:05 crc kubenswrapper[4941]: I1130 08:00:05.534985 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d26ef129-af80-41e0-9457-f05a65547495" path="/var/lib/kubelet/pods/d26ef129-af80-41e0-9457-f05a65547495/volumes"
Nov 30 08:00:06 crc kubenswrapper[4941]: I1130 08:00:06.964323 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qgv6" event={"ID":"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068","Type":"ContainerStarted","Data":"e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad"}
Nov 30 08:00:06 crc kubenswrapper[4941]: I1130 08:00:06.987452 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4qgv6" podStartSLOduration=3.149404841 podStartE2EDuration="5.987423943s" podCreationTimestamp="2025-11-30 08:00:01 +0000 UTC" firstStartedPulling="2025-11-30 08:00:02.919918988 +0000 UTC m=+4423.688090597" lastFinishedPulling="2025-11-30 08:00:05.75793809 +0000 UTC m=+4426.526109699" observedRunningTime="2025-11-30 08:00:06.985925788 +0000 UTC m=+4427.754097397" watchObservedRunningTime="2025-11-30 08:00:06.987423943 +0000 UTC m=+4427.755595562"
Nov 30 08:00:11 crc kubenswrapper[4941]: I1130 08:00:11.844347 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:11 crc kubenswrapper[4941]: I1130 08:00:11.845404 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:11 crc kubenswrapper[4941]: I1130 08:00:11.901700 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:12 crc kubenswrapper[4941]: I1130 08:00:12.060776 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:12 crc kubenswrapper[4941]: I1130 08:00:12.141663 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4qgv6"]
Nov 30 08:00:14 crc kubenswrapper[4941]: I1130 08:00:14.033170 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4qgv6" podUID="e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" containerName="registry-server" containerID="cri-o://e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad" gracePeriod=2
Nov 30 08:00:14 crc kubenswrapper[4941]: I1130 08:00:14.446077 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4qgv6"
Nov 30 08:00:14 crc kubenswrapper[4941]: I1130 08:00:14.561180 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-catalog-content\") pod \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\" (UID: \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\") "
Nov 30 08:00:14 crc kubenswrapper[4941]: I1130 08:00:14.561399 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-utilities\") pod \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\" (UID: \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\") "
Nov 30 08:00:14 crc kubenswrapper[4941]: I1130 08:00:14.561426 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntlzs\" (UniqueName: \"kubernetes.io/projected/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-kube-api-access-ntlzs\") pod \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\" (UID: \"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068\") "
Nov 30 08:00:14 crc kubenswrapper[4941]: I1130 08:00:14.562710 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-utilities" (OuterVolumeSpecName: "utilities") pod "e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" (UID: "e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
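[Editor's note: an arithmetic check of the pod_startup_latency_tracker line above, using only the timestamps it prints. podStartE2EDuration is observed-running minus creation; podStartSLOduration additionally subtracts the image-pull window. Plain Go, no kubelet code involved.]

```go
// Recomputes the two durations reported for certified-operators-4qgv6.
package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-11-30 08:00:01 +0000 UTC")
	firstPull := parse("2025-11-30 08:00:02.919918988 +0000 UTC")
	lastPull := parse("2025-11-30 08:00:05.75793809 +0000 UTC")
	running := parse("2025-11-30 08:00:06.987423943 +0000 UTC")

	e2e := running.Sub(created)          // 5.987423943s, matches podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 3.149404841s, matches podStartSLOduration
	fmt.Println(e2e, slo)
}
```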
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:00:14 crc kubenswrapper[4941]: I1130 08:00:14.580385 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-kube-api-access-ntlzs" (OuterVolumeSpecName: "kube-api-access-ntlzs") pod "e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" (UID: "e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068"). InnerVolumeSpecName "kube-api-access-ntlzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:00:14 crc kubenswrapper[4941]: I1130 08:00:14.664191 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:00:14 crc kubenswrapper[4941]: I1130 08:00:14.664233 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntlzs\" (UniqueName: \"kubernetes.io/projected/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-kube-api-access-ntlzs\") on node \"crc\" DevicePath \"\"" Nov 30 08:00:14 crc kubenswrapper[4941]: I1130 08:00:14.759373 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" (UID: "e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:00:14 crc kubenswrapper[4941]: I1130 08:00:14.765614 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.049084 4941 generic.go:334] "Generic (PLEG): container finished" podID="e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" containerID="e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad" exitCode=0 Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.049197 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4qgv6" Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.049232 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qgv6" event={"ID":"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068","Type":"ContainerDied","Data":"e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad"} Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.050810 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qgv6" event={"ID":"e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068","Type":"ContainerDied","Data":"0e3386ddcf340c59864fbe805b0ecfb4255d47aba0f2ad11fe1c7c016a9c1ca3"} Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.050849 4941 scope.go:117] "RemoveContainer" containerID="e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad" Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.094406 4941 scope.go:117] "RemoveContainer" containerID="b9c441155fbd5da2d6c15e08d049cc4f2560ca10222353118fe9962a9236faa6" Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.107142 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4qgv6"] Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.115582 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4qgv6"] Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.126412 4941 scope.go:117] "RemoveContainer" containerID="3878726a29132a7b5dea9179f2f081ae37ab861bdbaea72fc8137ad323f42c60" Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.165709 4941 scope.go:117] "RemoveContainer" containerID="e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad" Nov 30 08:00:15 crc kubenswrapper[4941]: E1130 08:00:15.166407 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad\": container with ID starting with e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad not found: ID does not exist" containerID="e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad" Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.166461 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad"} err="failed to get container status \"e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad\": rpc error: code = NotFound desc = could not find container \"e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad\": container with ID starting with e1266a9d59640b531c93b2e3815f895fa73369fc5dc2d51617af7b46d5c748ad not found: ID does not exist" Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.166514 4941 scope.go:117] "RemoveContainer" containerID="b9c441155fbd5da2d6c15e08d049cc4f2560ca10222353118fe9962a9236faa6" Nov 30 08:00:15 crc kubenswrapper[4941]: E1130 08:00:15.167192 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9c441155fbd5da2d6c15e08d049cc4f2560ca10222353118fe9962a9236faa6\": container with ID starting with b9c441155fbd5da2d6c15e08d049cc4f2560ca10222353118fe9962a9236faa6 not found: ID does not exist" containerID="b9c441155fbd5da2d6c15e08d049cc4f2560ca10222353118fe9962a9236faa6" Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.167237 4941 
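[Editor's note: a minimal sketch of the pattern in the RemoveContainer / NotFound lines above: deleting an already-gone container is treated as success, keeping cleanup idempotent. errNotFound, removeContainer and cleanup are invented stand-ins, not CRI client calls.]

```go
// Idempotent container cleanup: a NotFound on delete is not a failure.
package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("container not found: ID does not exist")

func removeContainer(id string, present map[string]bool) error {
	if !present[id] {
		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
	}
	delete(present, id)
	return nil
}

func cleanup(id string, present map[string]bool) {
	if err := removeContainer(id, present); err != nil {
		if errors.Is(err, errNotFound) {
			// Same outcome the log settles on: the container is already
			// gone, so there is nothing left to do.
			fmt.Println("DeleteContainer returned error (ignored):", err)
			return
		}
		panic(err)
	}
	fmt.Println("removed", id)
}

func main() {
	present := map[string]bool{}
	cleanup("e1266a9d5964", present) // already removed -> NotFound, ignored
}
```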
Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.167237 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9c441155fbd5da2d6c15e08d049cc4f2560ca10222353118fe9962a9236faa6"} err="failed to get container status \"b9c441155fbd5da2d6c15e08d049cc4f2560ca10222353118fe9962a9236faa6\": rpc error: code = NotFound desc = could not find container \"b9c441155fbd5da2d6c15e08d049cc4f2560ca10222353118fe9962a9236faa6\": container with ID starting with b9c441155fbd5da2d6c15e08d049cc4f2560ca10222353118fe9962a9236faa6 not found: ID does not exist"
Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.167263 4941 scope.go:117] "RemoveContainer" containerID="3878726a29132a7b5dea9179f2f081ae37ab861bdbaea72fc8137ad323f42c60"
Nov 30 08:00:15 crc kubenswrapper[4941]: E1130 08:00:15.168014 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3878726a29132a7b5dea9179f2f081ae37ab861bdbaea72fc8137ad323f42c60\": container with ID starting with 3878726a29132a7b5dea9179f2f081ae37ab861bdbaea72fc8137ad323f42c60 not found: ID does not exist" containerID="3878726a29132a7b5dea9179f2f081ae37ab861bdbaea72fc8137ad323f42c60"
Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.168072 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3878726a29132a7b5dea9179f2f081ae37ab861bdbaea72fc8137ad323f42c60"} err="failed to get container status \"3878726a29132a7b5dea9179f2f081ae37ab861bdbaea72fc8137ad323f42c60\": rpc error: code = NotFound desc = could not find container \"3878726a29132a7b5dea9179f2f081ae37ab861bdbaea72fc8137ad323f42c60\": container with ID starting with 3878726a29132a7b5dea9179f2f081ae37ab861bdbaea72fc8137ad323f42c60 not found: ID does not exist"
Nov 30 08:00:15 crc kubenswrapper[4941]: I1130 08:00:15.537634 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" path="/var/lib/kubelet/pods/e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068/volumes"
Nov 30 08:00:40 crc kubenswrapper[4941]: I1130 08:00:40.243550 4941 scope.go:117] "RemoveContainer" containerID="2ebe684723b8863df9e7727a18a7ac74078354b7c458b4c4542b0e0f7fdfb035"
Nov 30 08:00:40 crc kubenswrapper[4941]: I1130 08:00:40.274974 4941 scope.go:117] "RemoveContainer" containerID="8411c669ce5a701f951a585beb3e720ee0a47eb60e9d73b9f1a23d040028d327"
Nov 30 08:00:40 crc kubenswrapper[4941]: I1130 08:00:40.314419 4941 scope.go:117] "RemoveContainer" containerID="a07702c601879a0650091d30e972d3a6c2bf1d3ddff6b3d656832590f4f02611"
Nov 30 08:00:40 crc kubenswrapper[4941]: I1130 08:00:40.346931 4941 scope.go:117] "RemoveContainer" containerID="90e43243d1d69406961db5a7733baeabc43d9dd66be945738bab740f93ee49a9"
Nov 30 08:01:32 crc kubenswrapper[4941]: I1130 08:01:32.978793 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:01:32 crc kubenswrapper[4941]: I1130 08:01:32.979685 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:02:02 crc kubenswrapper[4941]: I1130 08:02:02.978736 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:02:02 crc kubenswrapper[4941]: I1130 08:02:02.979398 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:02:32 crc kubenswrapper[4941]: I1130 08:02:32.978612 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:02:32 crc kubenswrapper[4941]: I1130 08:02:32.979385 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:02:32 crc kubenswrapper[4941]: I1130 08:02:32.979463 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 08:02:32 crc kubenswrapper[4941]: I1130 08:02:32.980488 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 08:02:32 crc kubenswrapper[4941]: I1130 08:02:32.980579 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" gracePeriod=600
Nov 30 08:02:33 crc kubenswrapper[4941]: E1130 08:02:33.119199 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:02:33 crc kubenswrapper[4941]: I1130 08:02:33.544615 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" exitCode=0
Nov 30 08:02:33 crc kubenswrapper[4941]: I1130 08:02:33.544825 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7"}
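[Editor's note: a minimal sketch of the backoff behavior behind the repeating "back-off 5m0s" messages above. The 5m0s ceiling is quoted directly from the log; the 10s initial delay and doubling factor are assumptions about typical kubelet defaults, not read from this log.]

```go
// CrashLoopBackOff-style delay growth: double per restart up to a cap.
package main

import (
	"fmt"
	"time"
)

func main() {
	const maxBackoff = 5 * time.Minute // the "back-off 5m0s" ceiling in the log
	delay := 10 * time.Second          // assumed initial backoff
	for i := 0; i < 7; i++ {
		fmt.Printf("restart %d after %v\n", i+1, delay)
		delay *= 2
		if delay > maxBackoff {
			delay = maxBackoff
		}
	}
}
```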
Nov 30 08:02:33 crc kubenswrapper[4941]: I1130 08:02:33.545226 4941 scope.go:117] "RemoveContainer" containerID="355a16a1b52abc6a36991155e064942cfefe64c5bf70d9cbe30367f4a84ce847"
Nov 30 08:02:33 crc kubenswrapper[4941]: I1130 08:02:33.546689 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7"
Nov 30 08:02:33 crc kubenswrapper[4941]: E1130 08:02:33.547111 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:02:47 crc kubenswrapper[4941]: I1130 08:02:47.522495 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7"
Nov 30 08:02:47 crc kubenswrapper[4941]: E1130 08:02:47.525707 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:02:59 crc kubenswrapper[4941]: I1130 08:02:59.532489 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7"
Nov 30 08:02:59 crc kubenswrapper[4941]: E1130 08:02:59.534171 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:03:13 crc kubenswrapper[4941]: I1130 08:03:13.522208 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7"
Nov 30 08:03:13 crc kubenswrapper[4941]: E1130 08:03:13.523302 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.108701 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"]
Nov 30 08:03:18 crc kubenswrapper[4941]: E1130 08:03:18.109848 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" containerName="extract-content"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.109866 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" containerName="extract-content"
Nov 30 08:03:18 crc kubenswrapper[4941]: E1130 08:03:18.109886 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" containerName="registry-server"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.109892 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" containerName="registry-server"
Nov 30 08:03:18 crc kubenswrapper[4941]: E1130 08:03:18.109903 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" containerName="extract-utilities"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.109909 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" containerName="extract-utilities"
Nov 30 08:03:18 crc kubenswrapper[4941]: E1130 08:03:18.109928 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="616a8168-57aa-4b46-b33d-9c09e769c0e4" containerName="collect-profiles"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.109935 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="616a8168-57aa-4b46-b33d-9c09e769c0e4" containerName="collect-profiles"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.110081 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="616a8168-57aa-4b46-b33d-9c09e769c0e4" containerName="collect-profiles"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.110111 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2c4c88b-fc4b-43dc-8a02-e0ed4d09a068" containerName="registry-server"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.110843 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.121451 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.148752 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-59s4s"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.250844 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\") pod \"mariadb-copy-data\" (UID: \"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0\") " pod="openstack/mariadb-copy-data"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.251005 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79kws\" (UniqueName: \"kubernetes.io/projected/1e7d4833-4d03-4e88-af2b-dbba79cf7cd0-kube-api-access-79kws\") pod \"mariadb-copy-data\" (UID: \"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0\") " pod="openstack/mariadb-copy-data"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.353119 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79kws\" (UniqueName: \"kubernetes.io/projected/1e7d4833-4d03-4e88-af2b-dbba79cf7cd0-kube-api-access-79kws\") pod \"mariadb-copy-data\" (UID: \"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0\") " pod="openstack/mariadb-copy-data"
Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.353217 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\") pod \"mariadb-copy-data\" (UID: \"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0\") " pod="openstack/mariadb-copy-data"
pod="openstack/mariadb-copy-data" Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.357212 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.357256 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\") pod \"mariadb-copy-data\" (UID: \"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d575c39a331c65ef2815dfc30772a1d116fd37b105b46bd13ac1cb763436b3c4/globalmount\"" pod="openstack/mariadb-copy-data" Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.374739 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79kws\" (UniqueName: \"kubernetes.io/projected/1e7d4833-4d03-4e88-af2b-dbba79cf7cd0-kube-api-access-79kws\") pod \"mariadb-copy-data\" (UID: \"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0\") " pod="openstack/mariadb-copy-data" Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.393495 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\") pod \"mariadb-copy-data\" (UID: \"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0\") " pod="openstack/mariadb-copy-data" Nov 30 08:03:18 crc kubenswrapper[4941]: I1130 08:03:18.466282 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Nov 30 08:03:19 crc kubenswrapper[4941]: I1130 08:03:19.042598 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Nov 30 08:03:20 crc kubenswrapper[4941]: I1130 08:03:20.035999 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0","Type":"ContainerStarted","Data":"b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae"} Nov 30 08:03:20 crc kubenswrapper[4941]: I1130 08:03:20.036623 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0","Type":"ContainerStarted","Data":"e65cc1189defc74db3b9dd09251dd439c51977d6c768210224c1802e2fa09c27"} Nov 30 08:03:20 crc kubenswrapper[4941]: I1130 08:03:20.069822 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=3.069785112 podStartE2EDuration="3.069785112s" podCreationTimestamp="2025-11-30 08:03:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:03:20.059829374 +0000 UTC m=+4620.828001063" watchObservedRunningTime="2025-11-30 08:03:20.069785112 +0000 UTC m=+4620.837956761" Nov 30 08:03:23 crc kubenswrapper[4941]: I1130 08:03:23.291019 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Nov 30 08:03:23 crc kubenswrapper[4941]: I1130 08:03:23.293644 4941 util.go:30] "No sandbox for pod can be found. 
Nov 30 08:03:23 crc kubenswrapper[4941]: I1130 08:03:23.322584 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Nov 30 08:03:23 crc kubenswrapper[4941]: I1130 08:03:23.457159 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zz6s\" (UniqueName: \"kubernetes.io/projected/d9200ab2-f4ac-4d4b-bc9c-e4a33609808b-kube-api-access-6zz6s\") pod \"mariadb-client\" (UID: \"d9200ab2-f4ac-4d4b-bc9c-e4a33609808b\") " pod="openstack/mariadb-client"
Nov 30 08:03:23 crc kubenswrapper[4941]: I1130 08:03:23.558970 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zz6s\" (UniqueName: \"kubernetes.io/projected/d9200ab2-f4ac-4d4b-bc9c-e4a33609808b-kube-api-access-6zz6s\") pod \"mariadb-client\" (UID: \"d9200ab2-f4ac-4d4b-bc9c-e4a33609808b\") " pod="openstack/mariadb-client"
Nov 30 08:03:23 crc kubenswrapper[4941]: I1130 08:03:23.588994 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zz6s\" (UniqueName: \"kubernetes.io/projected/d9200ab2-f4ac-4d4b-bc9c-e4a33609808b-kube-api-access-6zz6s\") pod \"mariadb-client\" (UID: \"d9200ab2-f4ac-4d4b-bc9c-e4a33609808b\") " pod="openstack/mariadb-client"
Nov 30 08:03:23 crc kubenswrapper[4941]: I1130 08:03:23.627319 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 30 08:03:24 crc kubenswrapper[4941]: I1130 08:03:24.107047 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Nov 30 08:03:24 crc kubenswrapper[4941]: W1130 08:03:24.115832 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9200ab2_f4ac_4d4b_bc9c_e4a33609808b.slice/crio-ce9460316ec541e2e7495fbcfd5480e29b4b5305f0146d014358ac739fc2dec5 WatchSource:0}: Error finding container ce9460316ec541e2e7495fbcfd5480e29b4b5305f0146d014358ac739fc2dec5: Status 404 returned error can't find the container with id ce9460316ec541e2e7495fbcfd5480e29b4b5305f0146d014358ac739fc2dec5
Nov 30 08:03:24 crc kubenswrapper[4941]: I1130 08:03:24.521402 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7"
Nov 30 08:03:24 crc kubenswrapper[4941]: E1130 08:03:24.522066 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:03:25 crc kubenswrapper[4941]: I1130 08:03:25.082927 4941 generic.go:334] "Generic (PLEG): container finished" podID="d9200ab2-f4ac-4d4b-bc9c-e4a33609808b" containerID="03e201e8c22e22776dcfb88223134e0306f00bf46f055f623122afaa6cda2d3e" exitCode=0
Nov 30 08:03:25 crc kubenswrapper[4941]: I1130 08:03:25.082981 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"d9200ab2-f4ac-4d4b-bc9c-e4a33609808b","Type":"ContainerDied","Data":"03e201e8c22e22776dcfb88223134e0306f00bf46f055f623122afaa6cda2d3e"}
Nov 30 08:03:25 crc kubenswrapper[4941]: I1130 08:03:25.083015 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"d9200ab2-f4ac-4d4b-bc9c-e4a33609808b","Type":"ContainerStarted","Data":"ce9460316ec541e2e7495fbcfd5480e29b4b5305f0146d014358ac739fc2dec5"}
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.471890 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.527985 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_d9200ab2-f4ac-4d4b-bc9c-e4a33609808b/mariadb-client/0.log"
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.559275 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.564434 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.615820 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zz6s\" (UniqueName: \"kubernetes.io/projected/d9200ab2-f4ac-4d4b-bc9c-e4a33609808b-kube-api-access-6zz6s\") pod \"d9200ab2-f4ac-4d4b-bc9c-e4a33609808b\" (UID: \"d9200ab2-f4ac-4d4b-bc9c-e4a33609808b\") "
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.622606 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9200ab2-f4ac-4d4b-bc9c-e4a33609808b-kube-api-access-6zz6s" (OuterVolumeSpecName: "kube-api-access-6zz6s") pod "d9200ab2-f4ac-4d4b-bc9c-e4a33609808b" (UID: "d9200ab2-f4ac-4d4b-bc9c-e4a33609808b"). InnerVolumeSpecName "kube-api-access-6zz6s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.718426 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zz6s\" (UniqueName: \"kubernetes.io/projected/d9200ab2-f4ac-4d4b-bc9c-e4a33609808b-kube-api-access-6zz6s\") on node \"crc\" DevicePath \"\""
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.734303 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Nov 30 08:03:26 crc kubenswrapper[4941]: E1130 08:03:26.735043 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9200ab2-f4ac-4d4b-bc9c-e4a33609808b" containerName="mariadb-client"
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.735080 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9200ab2-f4ac-4d4b-bc9c-e4a33609808b" containerName="mariadb-client"
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.735648 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9200ab2-f4ac-4d4b-bc9c-e4a33609808b" containerName="mariadb-client"
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.737031 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.741436 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.819841 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kdxs\" (UniqueName: \"kubernetes.io/projected/1fe2f7e6-8dc9-4b69-bad6-ea511add5def-kube-api-access-2kdxs\") pod \"mariadb-client\" (UID: \"1fe2f7e6-8dc9-4b69-bad6-ea511add5def\") " pod="openstack/mariadb-client"
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.922103 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kdxs\" (UniqueName: \"kubernetes.io/projected/1fe2f7e6-8dc9-4b69-bad6-ea511add5def-kube-api-access-2kdxs\") pod \"mariadb-client\" (UID: \"1fe2f7e6-8dc9-4b69-bad6-ea511add5def\") " pod="openstack/mariadb-client"
Nov 30 08:03:26 crc kubenswrapper[4941]: I1130 08:03:26.948424 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kdxs\" (UniqueName: \"kubernetes.io/projected/1fe2f7e6-8dc9-4b69-bad6-ea511add5def-kube-api-access-2kdxs\") pod \"mariadb-client\" (UID: \"1fe2f7e6-8dc9-4b69-bad6-ea511add5def\") " pod="openstack/mariadb-client"
Nov 30 08:03:27 crc kubenswrapper[4941]: I1130 08:03:27.078481 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 30 08:03:27 crc kubenswrapper[4941]: I1130 08:03:27.100795 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce9460316ec541e2e7495fbcfd5480e29b4b5305f0146d014358ac739fc2dec5"
Nov 30 08:03:27 crc kubenswrapper[4941]: I1130 08:03:27.100929 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 30 08:03:27 crc kubenswrapper[4941]: I1130 08:03:27.134516 4941 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="d9200ab2-f4ac-4d4b-bc9c-e4a33609808b" podUID="1fe2f7e6-8dc9-4b69-bad6-ea511add5def"
Nov 30 08:03:27 crc kubenswrapper[4941]: I1130 08:03:27.364273 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Nov 30 08:03:27 crc kubenswrapper[4941]: I1130 08:03:27.535882 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9200ab2-f4ac-4d4b-bc9c-e4a33609808b" path="/var/lib/kubelet/pods/d9200ab2-f4ac-4d4b-bc9c-e4a33609808b/volumes"
Nov 30 08:03:28 crc kubenswrapper[4941]: I1130 08:03:28.111881 4941 generic.go:334] "Generic (PLEG): container finished" podID="1fe2f7e6-8dc9-4b69-bad6-ea511add5def" containerID="8f89299237a8f79203b2c6be09045d7b8ee1487341de57c5934eda94d977bdd3" exitCode=0
Nov 30 08:03:28 crc kubenswrapper[4941]: I1130 08:03:28.112208 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"1fe2f7e6-8dc9-4b69-bad6-ea511add5def","Type":"ContainerDied","Data":"8f89299237a8f79203b2c6be09045d7b8ee1487341de57c5934eda94d977bdd3"}
Nov 30 08:03:28 crc kubenswrapper[4941]: I1130 08:03:28.112627 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"1fe2f7e6-8dc9-4b69-bad6-ea511add5def","Type":"ContainerStarted","Data":"10245fe48919b9e66b7b88c708372553eceb42da3c9d8e1ca507392e87af82ae"}
Nov 30 08:03:29 crc kubenswrapper[4941]: I1130 08:03:29.515753 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 30 08:03:29 crc kubenswrapper[4941]: I1130 08:03:29.542994 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_1fe2f7e6-8dc9-4b69-bad6-ea511add5def/mariadb-client/0.log"
Nov 30 08:03:29 crc kubenswrapper[4941]: I1130 08:03:29.605501 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Nov 30 08:03:29 crc kubenswrapper[4941]: I1130 08:03:29.612105 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Nov 30 08:03:29 crc kubenswrapper[4941]: I1130 08:03:29.688946 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kdxs\" (UniqueName: \"kubernetes.io/projected/1fe2f7e6-8dc9-4b69-bad6-ea511add5def-kube-api-access-2kdxs\") pod \"1fe2f7e6-8dc9-4b69-bad6-ea511add5def\" (UID: \"1fe2f7e6-8dc9-4b69-bad6-ea511add5def\") "
Nov 30 08:03:29 crc kubenswrapper[4941]: I1130 08:03:29.699860 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fe2f7e6-8dc9-4b69-bad6-ea511add5def-kube-api-access-2kdxs" (OuterVolumeSpecName: "kube-api-access-2kdxs") pod "1fe2f7e6-8dc9-4b69-bad6-ea511add5def" (UID: "1fe2f7e6-8dc9-4b69-bad6-ea511add5def"). InnerVolumeSpecName "kube-api-access-2kdxs". PluginName "kubernetes.io/projected", VolumeGidValue ""
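[Editor's note: a minimal sketch of the status_manager decision above ("Pod was deleted and then recreated, skipping status update"): a status computed for the old incarnation must not be applied to a new pod that shares the namespace/name but carries a different UID. Function and variable names are invented.]

```go
// UID check that guards status updates across pod delete/recreate.
package main

import "fmt"

// shouldApplyStatus reports whether a pending status update still targets
// the live pod: it does only when the UIDs match.
func shouldApplyStatus(statusForUID, livePodUID string) bool {
	return statusForUID == livePodUID
}

func main() {
	oldUID := "d9200ab2-f4ac-4d4b-bc9c-e4a33609808b" // deleted mariadb-client
	newUID := "1fe2f7e6-8dc9-4b69-bad6-ea511add5def" // recreated mariadb-client
	if !shouldApplyStatus(oldUID, newUID) {
		fmt.Println("Pod was deleted and then recreated, skipping status update")
	}
}
```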
Nov 30 08:03:29 crc kubenswrapper[4941]: I1130 08:03:29.791781 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kdxs\" (UniqueName: \"kubernetes.io/projected/1fe2f7e6-8dc9-4b69-bad6-ea511add5def-kube-api-access-2kdxs\") on node \"crc\" DevicePath \"\""
Nov 30 08:03:30 crc kubenswrapper[4941]: I1130 08:03:30.144121 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10245fe48919b9e66b7b88c708372553eceb42da3c9d8e1ca507392e87af82ae"
Nov 30 08:03:30 crc kubenswrapper[4941]: I1130 08:03:30.144196 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 30 08:03:31 crc kubenswrapper[4941]: I1130 08:03:31.534129 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fe2f7e6-8dc9-4b69-bad6-ea511add5def" path="/var/lib/kubelet/pods/1fe2f7e6-8dc9-4b69-bad6-ea511add5def/volumes"
Nov 30 08:03:38 crc kubenswrapper[4941]: I1130 08:03:38.522264 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7"
Nov 30 08:03:38 crc kubenswrapper[4941]: E1130 08:03:38.523763 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:03:50 crc kubenswrapper[4941]: I1130 08:03:50.522513 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7"
Nov 30 08:03:50 crc kubenswrapper[4941]: E1130 08:03:50.523525 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.859116 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Nov 30 08:03:59 crc kubenswrapper[4941]: E1130 08:03:59.860440 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fe2f7e6-8dc9-4b69-bad6-ea511add5def" containerName="mariadb-client"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.860462 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fe2f7e6-8dc9-4b69-bad6-ea511add5def" containerName="mariadb-client"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.860691 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fe2f7e6-8dc9-4b69-bad6-ea511add5def" containerName="mariadb-client"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.861945 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.865707 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.869237 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-2wlqf"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.871999 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.886785 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"]
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.890012 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.921824 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.936504 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"]
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.941656 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"]
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.941788 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.958280 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"]
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.959424 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e18dd2f-0117-4e65-a420-190874e21598-config\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.959485 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzgjp\" (UniqueName: \"kubernetes.io/projected/5e18dd2f-0117-4e65-a420-190874e21598-kube-api-access-wzgjp\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.959838 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5e18dd2f-0117-4e65-a420-190874e21598-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.959871 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0"
Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.959965 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e18dd2f-0117-4e65-a420-190874e21598-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2"
pod="openstack/ovsdbserver-nb-2" Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.959992 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.960023 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqtfk\" (UniqueName: \"kubernetes.io/projected/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-kube-api-access-xqtfk\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.960054 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-05877e52-37cc-4434-ab93-c6743c47b115\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-05877e52-37cc-4434-ab93-c6743c47b115\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.960095 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e18dd2f-0117-4e65-a420-190874e21598-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.960114 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-config\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.960150 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-bf7bb0c8-4d43-4dad-81e2-3c3f3ecaff61\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bf7bb0c8-4d43-4dad-81e2-3c3f3ecaff61\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:03:59 crc kubenswrapper[4941]: I1130 08:03:59.960170 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.052162 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.053750 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.057215 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.057545 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.057671 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-kbsw6" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.063200 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e18dd2f-0117-4e65-a420-190874e21598-config\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.063244 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzgjp\" (UniqueName: \"kubernetes.io/projected/5e18dd2f-0117-4e65-a420-190874e21598-kube-api-access-wzgjp\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.063314 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktq44\" (UniqueName: \"kubernetes.io/projected/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-kube-api-access-ktq44\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.063352 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5e18dd2f-0117-4e65-a420-190874e21598-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.063377 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.063402 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e18dd2f-0117-4e65-a420-190874e21598-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.063716 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.063805 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-config\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: 
I1130 08:04:00.063867 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.063960 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqtfk\" (UniqueName: \"kubernetes.io/projected/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-kube-api-access-xqtfk\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.063992 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5e18dd2f-0117-4e65-a420-190874e21598-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.064031 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-05877e52-37cc-4434-ab93-c6743c47b115\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-05877e52-37cc-4434-ab93-c6743c47b115\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.064115 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e18dd2f-0117-4e65-a420-190874e21598-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.064158 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-config\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.064224 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.064269 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-bf7bb0c8-4d43-4dad-81e2-3c3f3ecaff61\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bf7bb0c8-4d43-4dad-81e2-3c3f3ecaff61\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.064306 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.064370 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.064425 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-553ccaf5-eef7-428a-b1e0-fe2ff0901efe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-553ccaf5-eef7-428a-b1e0-fe2ff0901efe\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.064562 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e18dd2f-0117-4e65-a420-190874e21598-config\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.064706 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5e18dd2f-0117-4e65-a420-190874e21598-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.066732 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.070887 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-config\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.071111 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.071921 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.072936 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.072964 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-bf7bb0c8-4d43-4dad-81e2-3c3f3ecaff61\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bf7bb0c8-4d43-4dad-81e2-3c3f3ecaff61\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f7b50e3ac4a95cdaf834d63d3be712d0454f600d2f342100c86d9a5bd1f7ad5f/globalmount\"" pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.073610 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.073681 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-05877e52-37cc-4434-ab93-c6743c47b115\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-05877e52-37cc-4434-ab93-c6743c47b115\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ced0ae5a5ba0ddfb7b837de7eb209f7849a8f0d8c02241f6bb0a03cc9e695b15/globalmount\"" pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.094829 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e18dd2f-0117-4e65-a420-190874e21598-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.095012 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.103583 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzgjp\" (UniqueName: \"kubernetes.io/projected/5e18dd2f-0117-4e65-a420-190874e21598-kube-api-access-wzgjp\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.135842 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqtfk\" (UniqueName: \"kubernetes.io/projected/3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d-kube-api-access-xqtfk\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.144903 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.151037 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.159768 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-bf7bb0c8-4d43-4dad-81e2-3c3f3ecaff61\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bf7bb0c8-4d43-4dad-81e2-3c3f3ecaff61\") pod \"ovsdbserver-nb-2\" (UID: \"5e18dd2f-0117-4e65-a420-190874e21598\") " pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.167894 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-config\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.167944 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tchs\" (UniqueName: \"kubernetes.io/projected/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-kube-api-access-4tchs\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.168023 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.168065 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.168105 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.168144 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-553ccaf5-eef7-428a-b1e0-fe2ff0901efe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-553ccaf5-eef7-428a-b1e0-fe2ff0901efe\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.168840 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.169123 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-19216afb-ae7a-4384-932f-7417ae67207c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-19216afb-ae7a-4384-932f-7417ae67207c\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 
08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.169147 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktq44\" (UniqueName: \"kubernetes.io/projected/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-kube-api-access-ktq44\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.169232 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.169252 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-config\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.169270 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.171483 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-config\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.172078 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.174147 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.174764 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.175869 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
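
Each burst of reconciler_common.go lines above is the volume manager's reconciler walking its desired state (every volume required by a pod scheduled to this node) against its actual state (what is already attached and mounted): VerifyControllerAttachedVolume confirms the volume is attached, MountVolume starts the mount, and operation_generator.go later reports SetUp success. The toy loop below models only that desired-versus-actual walk; the real reconciler dispatches operations asynchronously through an operation executor, and all names here are illustrative, not kubelet types.

    package main

    import "fmt"

    // A toy model of the reconcile loop driving the messages above: every
    // volume in the desired state that is not yet in the actual state gets
    // a MountVolume operation.
    type volumeKey struct{ pod, volume string }

    func reconcile(desired []volumeKey, actual map[volumeKey]bool) {
        for _, v := range desired {
            if actual[v] {
                continue // already mounted; nothing to do
            }
            fmt.Printf("operationExecutor.MountVolume started for volume %q pod %q\n", v.volume, v.pod)
            actual[v] = true // stand-in for the async SetUp completing
            fmt.Printf("MountVolume.SetUp succeeded for volume %q pod %q\n", v.volume, v.pod)
        }
    }

    func main() {
        desired := []volumeKey{
            {"ovsdbserver-nb-1", "config"},
            {"ovsdbserver-nb-1", "scripts"},
            {"ovsdbserver-nb-1", "combined-ca-bundle"},
        }
        reconcile(desired, map[volumeKey]bool{})
    }

Running it prints one started/succeeded pair per missing volume, mirroring the interleaved started-then-succeeded pattern in the entries above.
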
Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.175892 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-553ccaf5-eef7-428a-b1e0-fe2ff0901efe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-553ccaf5-eef7-428a-b1e0-fe2ff0901efe\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/36bff3d046c0825ba6af5da2609bdd7656f832c58eede7e203f00c15106cbb87/globalmount\"" pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.181467 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.183133 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.185534 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-05877e52-37cc-4434-ab93-c6743c47b115\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-05877e52-37cc-4434-ab93-c6743c47b115\") pod \"ovsdbserver-nb-0\" (UID: \"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d\") " pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.189076 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.195234 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktq44\" (UniqueName: \"kubernetes.io/projected/e6ac493b-d7d5-4a71-a87c-e0bd8de8afba-kube-api-access-ktq44\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.197830 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.203894 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.217096 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-553ccaf5-eef7-428a-b1e0-fe2ff0901efe\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-553ccaf5-eef7-428a-b1e0-fe2ff0901efe\") pod \"ovsdbserver-nb-1\" (UID: \"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba\") " pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.220963 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.260243 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.274234 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-19216afb-ae7a-4384-932f-7417ae67207c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-19216afb-ae7a-4384-932f-7417ae67207c\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.274517 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.274710 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.274869 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7fedd7ce-5656-4591-909c-b0ff87e1b969-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.275006 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fedd7ce-5656-4591-909c-b0ff87e1b969-config\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.275044 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-config\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.275187 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tchs\" (UniqueName: \"kubernetes.io/projected/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-kube-api-access-4tchs\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.275548 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.275840 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.275961 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/7fedd7ce-5656-4591-909c-b0ff87e1b969-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.276013 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.276109 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.276154 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7fedd7ce-5656-4591-909c-b0ff87e1b969-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.276184 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nf6vb\" (UniqueName: \"kubernetes.io/projected/7fedd7ce-5656-4591-909c-b0ff87e1b969-kube-api-access-nf6vb\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.276286 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5m5q\" (UniqueName: \"kubernetes.io/projected/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-kube-api-access-b5m5q\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.276381 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-25b903fd-2b2b-43d9-997c-d13ea29c1c53\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25b903fd-2b2b-43d9-997c-d13ea29c1c53\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.276408 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.276446 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-a816f6b3-4518-4020-b3d6-f39732625417\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a816f6b3-4518-4020-b3d6-f39732625417\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.276474 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-config\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.278148 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.278163 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-config\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.278648 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.278683 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-19216afb-ae7a-4384-932f-7417ae67207c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-19216afb-ae7a-4384-932f-7417ae67207c\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a06a4a083c8c3a726f4c79ecc56cd6288585d6d5efdb7a564205c7f1216a28c0/globalmount\"" pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.285340 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.303274 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tchs\" (UniqueName: \"kubernetes.io/projected/ba0f12dd-04a2-47b7-94b3-14d62c68cbbe-kube-api-access-4tchs\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.328014 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-19216afb-ae7a-4384-932f-7417ae67207c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-19216afb-ae7a-4384-932f-7417ae67207c\") pod \"ovsdbserver-sb-0\" (UID: \"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe\") " pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.377879 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.377931 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7fedd7ce-5656-4591-909c-b0ff87e1b969-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.377956 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fedd7ce-5656-4591-909c-b0ff87e1b969-config\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.378050 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fedd7ce-5656-4591-909c-b0ff87e1b969-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.378120 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.378145 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7fedd7ce-5656-4591-909c-b0ff87e1b969-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.378165 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nf6vb\" (UniqueName: \"kubernetes.io/projected/7fedd7ce-5656-4591-909c-b0ff87e1b969-kube-api-access-nf6vb\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.378217 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5m5q\" (UniqueName: \"kubernetes.io/projected/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-kube-api-access-b5m5q\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.378265 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-25b903fd-2b2b-43d9-997c-d13ea29c1c53\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25b903fd-2b2b-43d9-997c-d13ea29c1c53\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.378290 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.378383 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a816f6b3-4518-4020-b3d6-f39732625417\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a816f6b3-4518-4020-b3d6-f39732625417\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.378413 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-config\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.380103 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7fedd7ce-5656-4591-909c-b0ff87e1b969-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.380443 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fedd7ce-5656-4591-909c-b0ff87e1b969-config\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.380962 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.381349 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7fedd7ce-5656-4591-909c-b0ff87e1b969-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.381722 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.382562 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-config\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.387374 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fedd7ce-5656-4591-909c-b0ff87e1b969-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.389110 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.390222 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.390259 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-25b903fd-2b2b-43d9-997c-d13ea29c1c53\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25b903fd-2b2b-43d9-997c-d13ea29c1c53\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c795eb75caea4cdd43ecbba9123afd6efeae25a90052cbbf2e0da62e38184d77/globalmount\"" pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.390505 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.390694 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a816f6b3-4518-4020-b3d6-f39732625417\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a816f6b3-4518-4020-b3d6-f39732625417\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3a3ffc241e493801880d503254db14ab5bece06163cb50bcac0f250a3b6d9cdb/globalmount\"" pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.402364 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nf6vb\" (UniqueName: \"kubernetes.io/projected/7fedd7ce-5656-4591-909c-b0ff87e1b969-kube-api-access-nf6vb\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.407873 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5m5q\" (UniqueName: \"kubernetes.io/projected/c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6-kube-api-access-b5m5q\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.459725 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-a816f6b3-4518-4020-b3d6-f39732625417\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a816f6b3-4518-4020-b3d6-f39732625417\") pod \"ovsdbserver-sb-1\" (UID: \"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6\") " pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.461380 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-25b903fd-2b2b-43d9-997c-d13ea29c1c53\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-25b903fd-2b2b-43d9-997c-d13ea29c1c53\") pod \"ovsdbserver-sb-2\" (UID: \"7fedd7ce-5656-4591-909c-b0ff87e1b969\") " pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.554311 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.565449 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.577383 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.730828 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.755885 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 08:04:00 crc kubenswrapper[4941]: I1130 08:04:00.826813 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 30 08:04:00 crc kubenswrapper[4941]: W1130 08:04:00.828422 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3304fe93_73ea_4ebc_ab5d_6b4e761c0c2d.slice/crio-5a52a2fd67c3aa1880abe79372380ed93f359261ed3dc268baab618acb5c2df0 WatchSource:0}: Error finding container 5a52a2fd67c3aa1880abe79372380ed93f359261ed3dc268baab618acb5c2df0: Status 404 returned error can't find the container with id 5a52a2fd67c3aa1880abe79372380ed93f359261ed3dc268baab618acb5c2df0 Nov 30 08:04:01 crc kubenswrapper[4941]: I1130 08:04:01.204545 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 30 08:04:01 crc kubenswrapper[4941]: I1130 08:04:01.292035 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Nov 30 08:04:01 crc kubenswrapper[4941]: I1130 08:04:01.432026 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d","Type":"ContainerStarted","Data":"5a52a2fd67c3aa1880abe79372380ed93f359261ed3dc268baab618acb5c2df0"} Nov 30 08:04:01 crc kubenswrapper[4941]: I1130 08:04:01.433705 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba","Type":"ContainerStarted","Data":"8bdc5e6c2bdb84f0b8a5389ebce65cfd8587923e3e77804f108de4e9f1b1d1b8"} Nov 30 08:04:01 crc kubenswrapper[4941]: I1130 08:04:01.827989 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Nov 30 08:04:02 crc kubenswrapper[4941]: I1130 08:04:02.451379 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"5e18dd2f-0117-4e65-a420-190874e21598","Type":"ContainerStarted","Data":"6da82a1c389aa26ef297ad93376dbdfefc6136049e4e64a6082979e96d82f5e6"} Nov 30 08:04:02 crc kubenswrapper[4941]: I1130 08:04:02.453085 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe","Type":"ContainerStarted","Data":"f418cdb8e1cdfda02bf71b978b576c1fd3e844b330203e06b90d7669272db8bd"} Nov 30 08:04:02 crc kubenswrapper[4941]: I1130 08:04:02.454985 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"7fedd7ce-5656-4591-909c-b0ff87e1b969","Type":"ContainerStarted","Data":"0c29eca12b5ee5474f363c1b928f38f313fb7c27583393fe9cfc74038d5d4076"} Nov 30 08:04:02 crc kubenswrapper[4941]: I1130 08:04:02.809302 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Nov 30 08:04:02 crc kubenswrapper[4941]: W1130 08:04:02.816127 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6c1ed9f_9a62_4026_ab0c_6bdd3f6959e6.slice/crio-bbbfe284853322348023014233b63022fa831935a6787d7cf8ea3ee917dbfad1 WatchSource:0}: Error finding container 
bbbfe284853322348023014233b63022fa831935a6787d7cf8ea3ee917dbfad1: Status 404 returned error can't find the container with id bbbfe284853322348023014233b63022fa831935a6787d7cf8ea3ee917dbfad1 Nov 30 08:04:03 crc kubenswrapper[4941]: I1130 08:04:03.478533 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6","Type":"ContainerStarted","Data":"bbbfe284853322348023014233b63022fa831935a6787d7cf8ea3ee917dbfad1"} Nov 30 08:04:04 crc kubenswrapper[4941]: I1130 08:04:04.523861 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:04:04 crc kubenswrapper[4941]: E1130 08:04:04.524649 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:04:05 crc kubenswrapper[4941]: I1130 08:04:05.508270 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba","Type":"ContainerStarted","Data":"917f2a483d03e3486fb39b9a9b3b47583354219cc019f85858fcfaa71bf860c8"} Nov 30 08:04:05 crc kubenswrapper[4941]: I1130 08:04:05.515979 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d","Type":"ContainerStarted","Data":"5dceb65b23fd1f7772c54614880eee78fde9828f11100c0dc33a90822be2138e"} Nov 30 08:04:05 crc kubenswrapper[4941]: I1130 08:04:05.537615 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"5e18dd2f-0117-4e65-a420-190874e21598","Type":"ContainerStarted","Data":"0f78830c6a5b0e137497ea33c4029beee58a59c5637a5dc9e313cae3d636f586"} Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.535526 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"7fedd7ce-5656-4591-909c-b0ff87e1b969","Type":"ContainerStarted","Data":"47e53c23064bceaa00fd3da0552603849b68d9255f440593b2db9448702b0bdb"} Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.536610 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"7fedd7ce-5656-4591-909c-b0ff87e1b969","Type":"ContainerStarted","Data":"93c1b20a4cd162e81425696083bb20e7268f8b106596cd69124483864203d833"} Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.538796 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6","Type":"ContainerStarted","Data":"d0dbb17c7aa72a6d651ac38bd88adeb9fb1b7811db11c3f343cf21fd84c6cfab"} Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.538849 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6","Type":"ContainerStarted","Data":"9ec1feaa55bf73a2b78d48f77ab890f665dd04bbcca07adad8047b3afbdc38ab"} Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.541687 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d","Type":"ContainerStarted","Data":"678a730781b87745abf1d484658faebef02ecf71203b4dcd4b41d79f83527042"} Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.543166 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"5e18dd2f-0117-4e65-a420-190874e21598","Type":"ContainerStarted","Data":"a127078626fe723a39908d2d09b285b34fc37ace36a3550ae7486f3ad09ab474"} Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.545600 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"e6ac493b-d7d5-4a71-a87c-e0bd8de8afba","Type":"ContainerStarted","Data":"43acc4ee2de2eb7c6b3e42261b076d2fd4247fc7acb0a55607d2fd74f2f82705"} Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.549308 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe","Type":"ContainerStarted","Data":"e5d9c7816a481b116e6d18c9efdc5e3e54159340a9f144c24cc407b1aa52473a"} Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.549382 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"ba0f12dd-04a2-47b7-94b3-14d62c68cbbe","Type":"ContainerStarted","Data":"a9311f31c2ca3ceb1c8a8f40e558fb39ec2968588bd9b823d846acd875a6bc5b"} Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.555486 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.565060 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=3.402045219 podStartE2EDuration="7.564985742s" podCreationTimestamp="2025-11-30 08:03:59 +0000 UTC" firstStartedPulling="2025-11-30 08:04:01.509869748 +0000 UTC m=+4662.278041357" lastFinishedPulling="2025-11-30 08:04:05.672810271 +0000 UTC m=+4666.440981880" observedRunningTime="2025-11-30 08:04:06.560083269 +0000 UTC m=+4667.328254878" watchObservedRunningTime="2025-11-30 08:04:06.564985742 +0000 UTC m=+4667.333157351" Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.565927 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.581236 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.584481 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.468362888 podStartE2EDuration="7.584455573s" podCreationTimestamp="2025-11-30 08:03:59 +0000 UTC" firstStartedPulling="2025-11-30 08:04:01.509058783 +0000 UTC m=+4662.277230402" lastFinishedPulling="2025-11-30 08:04:05.625151478 +0000 UTC m=+4666.393323087" observedRunningTime="2025-11-30 08:04:06.581892384 +0000 UTC m=+4667.350063993" watchObservedRunningTime="2025-11-30 08:04:06.584455573 +0000 UTC m=+4667.352627192" Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.608068 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=5.621168471 podStartE2EDuration="8.608042842s" podCreationTimestamp="2025-11-30 08:03:58 +0000 UTC" firstStartedPulling="2025-11-30 08:04:01.905778512 +0000 UTC m=+4662.673950121" lastFinishedPulling="2025-11-30 08:04:04.892652873 +0000 UTC 
m=+4665.660824492" observedRunningTime="2025-11-30 08:04:06.601978224 +0000 UTC m=+4667.370149833" watchObservedRunningTime="2025-11-30 08:04:06.608042842 +0000 UTC m=+4667.376214441" Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.631436 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=4.920743688 podStartE2EDuration="7.631406014s" podCreationTimestamp="2025-11-30 08:03:59 +0000 UTC" firstStartedPulling="2025-11-30 08:04:02.820960373 +0000 UTC m=+4663.589131982" lastFinishedPulling="2025-11-30 08:04:05.531622699 +0000 UTC m=+4666.299794308" observedRunningTime="2025-11-30 08:04:06.626320187 +0000 UTC m=+4667.394491796" watchObservedRunningTime="2025-11-30 08:04:06.631406014 +0000 UTC m=+4667.399577653" Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.676150 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=4.55934767 podStartE2EDuration="8.676121816s" podCreationTimestamp="2025-11-30 08:03:58 +0000 UTC" firstStartedPulling="2025-11-30 08:04:00.831043291 +0000 UTC m=+4661.599214900" lastFinishedPulling="2025-11-30 08:04:04.947817447 +0000 UTC m=+4665.715989046" observedRunningTime="2025-11-30 08:04:06.655853179 +0000 UTC m=+4667.424024818" watchObservedRunningTime="2025-11-30 08:04:06.676121816 +0000 UTC m=+4667.444293435" Nov 30 08:04:06 crc kubenswrapper[4941]: I1130 08:04:06.680115 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=4.568570845 podStartE2EDuration="8.680096009s" podCreationTimestamp="2025-11-30 08:03:58 +0000 UTC" firstStartedPulling="2025-11-30 08:04:00.755643701 +0000 UTC m=+4661.523815310" lastFinishedPulling="2025-11-30 08:04:04.867168855 +0000 UTC m=+4665.635340474" observedRunningTime="2025-11-30 08:04:06.674215767 +0000 UTC m=+4667.442387376" watchObservedRunningTime="2025-11-30 08:04:06.680096009 +0000 UTC m=+4667.448267638" Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.205376 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.221950 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.260084 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.260946 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.311322 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.383107 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.580065 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.580191 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.580643 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Nov 30 
08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.614024 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.614889 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.630965 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2"
Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.631464 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2"
Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.641815 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1"
Nov 30 08:04:09 crc kubenswrapper[4941]: I1130 08:04:09.643650 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.332156 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.336070 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.439513 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.614118 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.630306 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.681955 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6ddfc7666f-ttngw"]
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.683825 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.686864 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.700483 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ddfc7666f-ttngw"]
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.719172 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.835007 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-config\") pod \"dnsmasq-dns-6ddfc7666f-ttngw\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.835125 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-ovsdbserver-nb\") pod \"dnsmasq-dns-6ddfc7666f-ttngw\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.835180 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-dns-svc\") pod \"dnsmasq-dns-6ddfc7666f-ttngw\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.835213 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfgsd\" (UniqueName: \"kubernetes.io/projected/65feb429-0004-4efa-8314-f2d57265dae5-kube-api-access-jfgsd\") pod \"dnsmasq-dns-6ddfc7666f-ttngw\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.937448 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-config\") pod \"dnsmasq-dns-6ddfc7666f-ttngw\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.938840 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-ovsdbserver-nb\") pod \"dnsmasq-dns-6ddfc7666f-ttngw\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.939119 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-config\") pod \"dnsmasq-dns-6ddfc7666f-ttngw\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.939234 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-dns-svc\") pod \"dnsmasq-dns-6ddfc7666f-ttngw\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.939605 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfgsd\" (UniqueName: \"kubernetes.io/projected/65feb429-0004-4efa-8314-f2d57265dae5-kube-api-access-jfgsd\") pod \"dnsmasq-dns-6ddfc7666f-ttngw\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.939864 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-ovsdbserver-nb\") pod \"dnsmasq-dns-6ddfc7666f-ttngw\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.940298 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-dns-svc\") pod \"dnsmasq-dns-6ddfc7666f-ttngw\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:10 crc kubenswrapper[4941]: I1130 08:04:10.967154 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfgsd\" (UniqueName: \"kubernetes.io/projected/65feb429-0004-4efa-8314-f2d57265dae5-kube-api-access-jfgsd\") pod \"dnsmasq-dns-6ddfc7666f-ttngw\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.007054 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw"
Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.192379 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ddfc7666f-ttngw"]
Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.224124 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6957fb7fd9-j9sc9"]
Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.227654 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9"
Need to start a new one" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.231691 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.234268 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6957fb7fd9-j9sc9"] Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.361528 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfjjg\" (UniqueName: \"kubernetes.io/projected/e8d96cc0-5e98-48af-878c-bbb6df1ab926-kube-api-access-gfjjg\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.361596 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-ovsdbserver-sb\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.361644 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-config\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.361668 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-ovsdbserver-nb\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.361751 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-dns-svc\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.462958 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-dns-svc\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.463040 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfjjg\" (UniqueName: \"kubernetes.io/projected/e8d96cc0-5e98-48af-878c-bbb6df1ab926-kube-api-access-gfjjg\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.463064 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-ovsdbserver-sb\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " 
pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.463109 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-config\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.463127 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-ovsdbserver-nb\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.464369 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-ovsdbserver-nb\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.464784 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-dns-svc\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.465016 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-ovsdbserver-sb\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.465588 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-config\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.493058 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfjjg\" (UniqueName: \"kubernetes.io/projected/e8d96cc0-5e98-48af-878c-bbb6df1ab926-kube-api-access-gfjjg\") pod \"dnsmasq-dns-6957fb7fd9-j9sc9\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.551579 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.594403 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ddfc7666f-ttngw"] Nov 30 08:04:11 crc kubenswrapper[4941]: I1130 08:04:11.611218 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw" event={"ID":"65feb429-0004-4efa-8314-f2d57265dae5","Type":"ContainerStarted","Data":"a81b1830f12ed249e24dd5b7c5dbd2b7cfbfcf429e6a774fe8e2829199d07159"} Nov 30 08:04:12 crc kubenswrapper[4941]: I1130 08:04:12.091824 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6957fb7fd9-j9sc9"] Nov 30 08:04:12 crc kubenswrapper[4941]: I1130 08:04:12.625443 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" event={"ID":"e8d96cc0-5e98-48af-878c-bbb6df1ab926","Type":"ContainerStarted","Data":"b0aa2d98a7c8b5ee62ca6a0230865b832eb4d82c9243684a350c320fad97736c"} Nov 30 08:04:12 crc kubenswrapper[4941]: I1130 08:04:12.629239 4941 generic.go:334] "Generic (PLEG): container finished" podID="65feb429-0004-4efa-8314-f2d57265dae5" containerID="6773d41d7c9b1afdd6f3a552816f7f6d6155db9aac51dc18e98d324b590a4ca8" exitCode=0 Nov 30 08:04:12 crc kubenswrapper[4941]: I1130 08:04:12.629296 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw" event={"ID":"65feb429-0004-4efa-8314-f2d57265dae5","Type":"ContainerDied","Data":"6773d41d7c9b1afdd6f3a552816f7f6d6155db9aac51dc18e98d324b590a4ca8"} Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.197553 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.252567 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-dns-svc\") pod \"65feb429-0004-4efa-8314-f2d57265dae5\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.252682 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-ovsdbserver-nb\") pod \"65feb429-0004-4efa-8314-f2d57265dae5\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.252717 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-config\") pod \"65feb429-0004-4efa-8314-f2d57265dae5\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.252748 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfgsd\" (UniqueName: \"kubernetes.io/projected/65feb429-0004-4efa-8314-f2d57265dae5-kube-api-access-jfgsd\") pod \"65feb429-0004-4efa-8314-f2d57265dae5\" (UID: \"65feb429-0004-4efa-8314-f2d57265dae5\") " Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.268090 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65feb429-0004-4efa-8314-f2d57265dae5-kube-api-access-jfgsd" (OuterVolumeSpecName: "kube-api-access-jfgsd") pod "65feb429-0004-4efa-8314-f2d57265dae5" (UID: "65feb429-0004-4efa-8314-f2d57265dae5"). 
InnerVolumeSpecName "kube-api-access-jfgsd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.286142 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-config" (OuterVolumeSpecName: "config") pod "65feb429-0004-4efa-8314-f2d57265dae5" (UID: "65feb429-0004-4efa-8314-f2d57265dae5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.293956 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "65feb429-0004-4efa-8314-f2d57265dae5" (UID: "65feb429-0004-4efa-8314-f2d57265dae5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.297218 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "65feb429-0004-4efa-8314-f2d57265dae5" (UID: "65feb429-0004-4efa-8314-f2d57265dae5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.355205 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.355897 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.355924 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65feb429-0004-4efa-8314-f2d57265dae5-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.355944 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfgsd\" (UniqueName: \"kubernetes.io/projected/65feb429-0004-4efa-8314-f2d57265dae5-kube-api-access-jfgsd\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.498642 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Nov 30 08:04:13 crc kubenswrapper[4941]: E1130 08:04:13.499031 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65feb429-0004-4efa-8314-f2d57265dae5" containerName="init" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.499050 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="65feb429-0004-4efa-8314-f2d57265dae5" containerName="init" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.499215 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="65feb429-0004-4efa-8314-f2d57265dae5" containerName="init" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.499791 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.503040 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.513745 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.558587 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7m82v\" (UniqueName: \"kubernetes.io/projected/2940abf6-24c7-499a-8c68-9d4595b42f80-kube-api-access-7m82v\") pod \"ovn-copy-data\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") " pod="openstack/ovn-copy-data" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.558653 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/2940abf6-24c7-499a-8c68-9d4595b42f80-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") " pod="openstack/ovn-copy-data" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.558698 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9cff42f1-df09-4855-982a-b703169198fc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9cff42f1-df09-4855-982a-b703169198fc\") pod \"ovn-copy-data\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") " pod="openstack/ovn-copy-data" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.641047 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.641253 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ddfc7666f-ttngw" event={"ID":"65feb429-0004-4efa-8314-f2d57265dae5","Type":"ContainerDied","Data":"a81b1830f12ed249e24dd5b7c5dbd2b7cfbfcf429e6a774fe8e2829199d07159"} Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.642535 4941 scope.go:117] "RemoveContainer" containerID="6773d41d7c9b1afdd6f3a552816f7f6d6155db9aac51dc18e98d324b590a4ca8" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.643308 4941 generic.go:334] "Generic (PLEG): container finished" podID="e8d96cc0-5e98-48af-878c-bbb6df1ab926" containerID="0310d52f7c06d36a6696afa85b4b9f2ed03fe4c9447fc40c18ef1ce6916cc6e4" exitCode=0 Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.643495 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" event={"ID":"e8d96cc0-5e98-48af-878c-bbb6df1ab926","Type":"ContainerDied","Data":"0310d52f7c06d36a6696afa85b4b9f2ed03fe4c9447fc40c18ef1ce6916cc6e4"} Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.660143 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7m82v\" (UniqueName: \"kubernetes.io/projected/2940abf6-24c7-499a-8c68-9d4595b42f80-kube-api-access-7m82v\") pod \"ovn-copy-data\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") " pod="openstack/ovn-copy-data" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.660213 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/2940abf6-24c7-499a-8c68-9d4595b42f80-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") " pod="openstack/ovn-copy-data" Nov 30 08:04:13 crc 
kubenswrapper[4941]: I1130 08:04:13.660244 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9cff42f1-df09-4855-982a-b703169198fc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9cff42f1-df09-4855-982a-b703169198fc\") pod \"ovn-copy-data\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") " pod="openstack/ovn-copy-data" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.666498 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.666558 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9cff42f1-df09-4855-982a-b703169198fc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9cff42f1-df09-4855-982a-b703169198fc\") pod \"ovn-copy-data\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9c55ce82de6e4b88019eafc65815bcb3a99fa124fb5a7c731a2c5884514cb3e6/globalmount\"" pod="openstack/ovn-copy-data" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.690267 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/2940abf6-24c7-499a-8c68-9d4595b42f80-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") " pod="openstack/ovn-copy-data" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.694441 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7m82v\" (UniqueName: \"kubernetes.io/projected/2940abf6-24c7-499a-8c68-9d4595b42f80-kube-api-access-7m82v\") pod \"ovn-copy-data\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") " pod="openstack/ovn-copy-data" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.752340 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ddfc7666f-ttngw"] Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.763781 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9cff42f1-df09-4855-982a-b703169198fc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9cff42f1-df09-4855-982a-b703169198fc\") pod \"ovn-copy-data\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") " pod="openstack/ovn-copy-data" Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.771470 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6ddfc7666f-ttngw"] Nov 30 08:04:13 crc kubenswrapper[4941]: I1130 08:04:13.821463 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Nov 30 08:04:14 crc kubenswrapper[4941]: I1130 08:04:14.398099 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Nov 30 08:04:14 crc kubenswrapper[4941]: W1130 08:04:14.399244 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2940abf6_24c7_499a_8c68_9d4595b42f80.slice/crio-8f436723875bd88b34c561c1279e9da1ec7536a89f89fccdcd6c11d23c73bdda WatchSource:0}: Error finding container 8f436723875bd88b34c561c1279e9da1ec7536a89f89fccdcd6c11d23c73bdda: Status 404 returned error can't find the container with id 8f436723875bd88b34c561c1279e9da1ec7536a89f89fccdcd6c11d23c73bdda Nov 30 08:04:14 crc kubenswrapper[4941]: I1130 08:04:14.656141 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"2940abf6-24c7-499a-8c68-9d4595b42f80","Type":"ContainerStarted","Data":"8f436723875bd88b34c561c1279e9da1ec7536a89f89fccdcd6c11d23c73bdda"} Nov 30 08:04:14 crc kubenswrapper[4941]: I1130 08:04:14.659918 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" event={"ID":"e8d96cc0-5e98-48af-878c-bbb6df1ab926","Type":"ContainerStarted","Data":"bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf"} Nov 30 08:04:14 crc kubenswrapper[4941]: I1130 08:04:14.660232 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:14 crc kubenswrapper[4941]: I1130 08:04:14.705869 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" podStartSLOduration=3.705829581 podStartE2EDuration="3.705829581s" podCreationTimestamp="2025-11-30 08:04:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:04:14.695403208 +0000 UTC m=+4675.463574837" watchObservedRunningTime="2025-11-30 08:04:14.705829581 +0000 UTC m=+4675.474001230" Nov 30 08:04:15 crc kubenswrapper[4941]: I1130 08:04:15.536021 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65feb429-0004-4efa-8314-f2d57265dae5" path="/var/lib/kubelet/pods/65feb429-0004-4efa-8314-f2d57265dae5/volumes" Nov 30 08:04:15 crc kubenswrapper[4941]: I1130 08:04:15.681416 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"2940abf6-24c7-499a-8c68-9d4595b42f80","Type":"ContainerStarted","Data":"c4f1b6f58925c904de5aeeb3fa736e3fb718b9093b5b269f8bc3190cb23246e2"} Nov 30 08:04:15 crc kubenswrapper[4941]: I1130 08:04:15.704917 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.495033349 podStartE2EDuration="3.704890444s" podCreationTimestamp="2025-11-30 08:04:12 +0000 UTC" firstStartedPulling="2025-11-30 08:04:14.402371713 +0000 UTC m=+4675.170543322" lastFinishedPulling="2025-11-30 08:04:14.612228788 +0000 UTC m=+4675.380400417" observedRunningTime="2025-11-30 08:04:15.700992353 +0000 UTC m=+4676.469163962" watchObservedRunningTime="2025-11-30 08:04:15.704890444 +0000 UTC m=+4676.473062073" Nov 30 08:04:17 crc kubenswrapper[4941]: I1130 08:04:17.521985 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:04:17 crc kubenswrapper[4941]: E1130 08:04:17.522937 4941 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.553851 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.699625 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-645f94f75c-fcdsf"] Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.700525 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf" podUID="ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc" containerName="dnsmasq-dns" containerID="cri-o://d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4" gracePeriod=10 Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.760356 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.762360 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.766345 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-dpqjt" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.766566 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.766729 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.782637 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.853359 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6817f3c1-bc9d-462e-a494-5ac7a10661a5-scripts\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.853412 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7kht\" (UniqueName: \"kubernetes.io/projected/6817f3c1-bc9d-462e-a494-5ac7a10661a5-kube-api-access-z7kht\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.853431 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6817f3c1-bc9d-462e-a494-5ac7a10661a5-config\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.853733 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6817f3c1-bc9d-462e-a494-5ac7a10661a5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 
08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.853793 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6817f3c1-bc9d-462e-a494-5ac7a10661a5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.957389 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6817f3c1-bc9d-462e-a494-5ac7a10661a5-scripts\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.957458 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7kht\" (UniqueName: \"kubernetes.io/projected/6817f3c1-bc9d-462e-a494-5ac7a10661a5-kube-api-access-z7kht\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.957491 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6817f3c1-bc9d-462e-a494-5ac7a10661a5-config\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.957582 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6817f3c1-bc9d-462e-a494-5ac7a10661a5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.957649 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6817f3c1-bc9d-462e-a494-5ac7a10661a5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.958444 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6817f3c1-bc9d-462e-a494-5ac7a10661a5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.958638 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6817f3c1-bc9d-462e-a494-5ac7a10661a5-scripts\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.958882 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6817f3c1-bc9d-462e-a494-5ac7a10661a5-config\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 08:04:21.972185 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6817f3c1-bc9d-462e-a494-5ac7a10661a5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:21 crc kubenswrapper[4941]: I1130 
08:04:21.974831 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7kht\" (UniqueName: \"kubernetes.io/projected/6817f3c1-bc9d-462e-a494-5ac7a10661a5-kube-api-access-z7kht\") pod \"ovn-northd-0\" (UID: \"6817f3c1-bc9d-462e-a494-5ac7a10661a5\") " pod="openstack/ovn-northd-0" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.113366 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.191432 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.364020 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-config\") pod \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\" (UID: \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\") " Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.364118 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-dns-svc\") pod \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\" (UID: \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\") " Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.364196 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5t5z\" (UniqueName: \"kubernetes.io/projected/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-kube-api-access-x5t5z\") pod \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\" (UID: \"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc\") " Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.369858 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-kube-api-access-x5t5z" (OuterVolumeSpecName: "kube-api-access-x5t5z") pod "ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc" (UID: "ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc"). InnerVolumeSpecName "kube-api-access-x5t5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.405161 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-config" (OuterVolumeSpecName: "config") pod "ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc" (UID: "ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.406029 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc" (UID: "ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.465714 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.465746 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.465758 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5t5z\" (UniqueName: \"kubernetes.io/projected/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc-kube-api-access-x5t5z\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.587196 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.773844 4941 generic.go:334] "Generic (PLEG): container finished" podID="ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc" containerID="d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4" exitCode=0 Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.773940 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.773918 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf" event={"ID":"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc","Type":"ContainerDied","Data":"d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4"} Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.774114 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-645f94f75c-fcdsf" event={"ID":"ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc","Type":"ContainerDied","Data":"9c96096de27d18b9fda8341a33e785083b797ede0672b9a1d529deb6a6dbca28"} Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.774158 4941 scope.go:117] "RemoveContainer" containerID="d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.785379 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6817f3c1-bc9d-462e-a494-5ac7a10661a5","Type":"ContainerStarted","Data":"e48bd6bbd0e3eeaa2cd8032668ea35c8a0bf6446f22070812284d651eae0911e"} Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.805980 4941 scope.go:117] "RemoveContainer" containerID="69ac4949b965efa64beabb7f50e0b4b996cc8208e8200bf4400e80f69061697a" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.851929 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-645f94f75c-fcdsf"] Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.866764 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-645f94f75c-fcdsf"] Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.912540 4941 scope.go:117] "RemoveContainer" containerID="d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4" Nov 30 08:04:22 crc kubenswrapper[4941]: E1130 08:04:22.913188 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4\": container with ID starting with 
d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4 not found: ID does not exist" containerID="d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.913255 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4"} err="failed to get container status \"d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4\": rpc error: code = NotFound desc = could not find container \"d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4\": container with ID starting with d6339aa0c8ecf55850614affa1ece89f2f075bf50bcf8d141d946d44d3e257d4 not found: ID does not exist" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.913296 4941 scope.go:117] "RemoveContainer" containerID="69ac4949b965efa64beabb7f50e0b4b996cc8208e8200bf4400e80f69061697a" Nov 30 08:04:22 crc kubenswrapper[4941]: E1130 08:04:22.913797 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69ac4949b965efa64beabb7f50e0b4b996cc8208e8200bf4400e80f69061697a\": container with ID starting with 69ac4949b965efa64beabb7f50e0b4b996cc8208e8200bf4400e80f69061697a not found: ID does not exist" containerID="69ac4949b965efa64beabb7f50e0b4b996cc8208e8200bf4400e80f69061697a" Nov 30 08:04:22 crc kubenswrapper[4941]: I1130 08:04:22.913879 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69ac4949b965efa64beabb7f50e0b4b996cc8208e8200bf4400e80f69061697a"} err="failed to get container status \"69ac4949b965efa64beabb7f50e0b4b996cc8208e8200bf4400e80f69061697a\": rpc error: code = NotFound desc = could not find container \"69ac4949b965efa64beabb7f50e0b4b996cc8208e8200bf4400e80f69061697a\": container with ID starting with 69ac4949b965efa64beabb7f50e0b4b996cc8208e8200bf4400e80f69061697a not found: ID does not exist" Nov 30 08:04:23 crc kubenswrapper[4941]: I1130 08:04:23.539857 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc" path="/var/lib/kubelet/pods/ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc/volumes" Nov 30 08:04:23 crc kubenswrapper[4941]: I1130 08:04:23.798443 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6817f3c1-bc9d-462e-a494-5ac7a10661a5","Type":"ContainerStarted","Data":"8311d81e5d8141a0afd8eb4fa3c7de3d70905a0bb37981d59fab0da44cffc061"} Nov 30 08:04:23 crc kubenswrapper[4941]: I1130 08:04:23.798907 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"6817f3c1-bc9d-462e-a494-5ac7a10661a5","Type":"ContainerStarted","Data":"160ac83410f264718300bed03b1bfb42dc85eb473bccc0895dc65bee5d039afd"} Nov 30 08:04:23 crc kubenswrapper[4941]: I1130 08:04:23.798982 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Nov 30 08:04:23 crc kubenswrapper[4941]: I1130 08:04:23.821546 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.101265798 podStartE2EDuration="2.821527755s" podCreationTimestamp="2025-11-30 08:04:21 +0000 UTC" firstStartedPulling="2025-11-30 08:04:22.59977006 +0000 UTC m=+4683.367941679" lastFinishedPulling="2025-11-30 08:04:23.320032027 +0000 UTC m=+4684.088203636" observedRunningTime="2025-11-30 08:04:23.817189051 +0000 UTC m=+4684.585360660" 
watchObservedRunningTime="2025-11-30 08:04:23.821527755 +0000 UTC m=+4684.589699364" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.312261 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-gxz8w"] Nov 30 08:04:27 crc kubenswrapper[4941]: E1130 08:04:27.313543 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc" containerName="dnsmasq-dns" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.313558 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc" containerName="dnsmasq-dns" Nov 30 08:04:27 crc kubenswrapper[4941]: E1130 08:04:27.313576 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc" containerName="init" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.313582 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc" containerName="init" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.313772 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce7ca895-e1d0-402b-ba4a-7c7e40d8c3cc" containerName="dnsmasq-dns" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.314481 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gxz8w" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.325830 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-gxz8w"] Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.370448 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84jp2\" (UniqueName: \"kubernetes.io/projected/92d378e1-3a85-473a-bcc8-e74c780db6ad-kube-api-access-84jp2\") pod \"keystone-db-create-gxz8w\" (UID: \"92d378e1-3a85-473a-bcc8-e74c780db6ad\") " pod="openstack/keystone-db-create-gxz8w" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.370929 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92d378e1-3a85-473a-bcc8-e74c780db6ad-operator-scripts\") pod \"keystone-db-create-gxz8w\" (UID: \"92d378e1-3a85-473a-bcc8-e74c780db6ad\") " pod="openstack/keystone-db-create-gxz8w" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.407849 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7c0e-account-create-update-tjpdc"] Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.409301 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7c0e-account-create-update-tjpdc" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.411734 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.417749 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7c0e-account-create-update-tjpdc"] Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.473759 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92d378e1-3a85-473a-bcc8-e74c780db6ad-operator-scripts\") pod \"keystone-db-create-gxz8w\" (UID: \"92d378e1-3a85-473a-bcc8-e74c780db6ad\") " pod="openstack/keystone-db-create-gxz8w" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.473840 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3-operator-scripts\") pod \"keystone-7c0e-account-create-update-tjpdc\" (UID: \"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3\") " pod="openstack/keystone-7c0e-account-create-update-tjpdc" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.473883 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz2gg\" (UniqueName: \"kubernetes.io/projected/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3-kube-api-access-wz2gg\") pod \"keystone-7c0e-account-create-update-tjpdc\" (UID: \"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3\") " pod="openstack/keystone-7c0e-account-create-update-tjpdc" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.473922 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84jp2\" (UniqueName: \"kubernetes.io/projected/92d378e1-3a85-473a-bcc8-e74c780db6ad-kube-api-access-84jp2\") pod \"keystone-db-create-gxz8w\" (UID: \"92d378e1-3a85-473a-bcc8-e74c780db6ad\") " pod="openstack/keystone-db-create-gxz8w" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.474868 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92d378e1-3a85-473a-bcc8-e74c780db6ad-operator-scripts\") pod \"keystone-db-create-gxz8w\" (UID: \"92d378e1-3a85-473a-bcc8-e74c780db6ad\") " pod="openstack/keystone-db-create-gxz8w" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.575377 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3-operator-scripts\") pod \"keystone-7c0e-account-create-update-tjpdc\" (UID: \"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3\") " pod="openstack/keystone-7c0e-account-create-update-tjpdc" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.575444 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz2gg\" (UniqueName: \"kubernetes.io/projected/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3-kube-api-access-wz2gg\") pod \"keystone-7c0e-account-create-update-tjpdc\" (UID: \"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3\") " pod="openstack/keystone-7c0e-account-create-update-tjpdc" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.577252 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3-operator-scripts\") pod \"keystone-7c0e-account-create-update-tjpdc\" (UID: \"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3\") " pod="openstack/keystone-7c0e-account-create-update-tjpdc" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.790009 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz2gg\" (UniqueName: \"kubernetes.io/projected/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3-kube-api-access-wz2gg\") pod \"keystone-7c0e-account-create-update-tjpdc\" (UID: \"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3\") " pod="openstack/keystone-7c0e-account-create-update-tjpdc" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.791005 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84jp2\" (UniqueName: \"kubernetes.io/projected/92d378e1-3a85-473a-bcc8-e74c780db6ad-kube-api-access-84jp2\") pod \"keystone-db-create-gxz8w\" (UID: \"92d378e1-3a85-473a-bcc8-e74c780db6ad\") " pod="openstack/keystone-db-create-gxz8w" Nov 30 08:04:27 crc kubenswrapper[4941]: I1130 08:04:27.950876 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gxz8w" Nov 30 08:04:28 crc kubenswrapper[4941]: I1130 08:04:28.029850 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7c0e-account-create-update-tjpdc" Nov 30 08:04:28 crc kubenswrapper[4941]: W1130 08:04:28.565700 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92d378e1_3a85_473a_bcc8_e74c780db6ad.slice/crio-3122f1d1d11269bcc40af665fad2944790c4d6f3c03761b37210bb7ea1a04b96 WatchSource:0}: Error finding container 3122f1d1d11269bcc40af665fad2944790c4d6f3c03761b37210bb7ea1a04b96: Status 404 returned error can't find the container with id 3122f1d1d11269bcc40af665fad2944790c4d6f3c03761b37210bb7ea1a04b96 Nov 30 08:04:28 crc kubenswrapper[4941]: I1130 08:04:28.571053 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-gxz8w"] Nov 30 08:04:28 crc kubenswrapper[4941]: I1130 08:04:28.657667 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7c0e-account-create-update-tjpdc"] Nov 30 08:04:28 crc kubenswrapper[4941]: W1130 08:04:28.664242 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b11da1e_c7d3_4e7d_9f8c_c22fef5fd2f3.slice/crio-9988083b9e3b91182f751a6a59eb4e81487a047d3f858845221046a4bf0d6e2c WatchSource:0}: Error finding container 9988083b9e3b91182f751a6a59eb4e81487a047d3f858845221046a4bf0d6e2c: Status 404 returned error can't find the container with id 9988083b9e3b91182f751a6a59eb4e81487a047d3f858845221046a4bf0d6e2c Nov 30 08:04:28 crc kubenswrapper[4941]: I1130 08:04:28.858729 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7c0e-account-create-update-tjpdc" event={"ID":"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3","Type":"ContainerStarted","Data":"9988083b9e3b91182f751a6a59eb4e81487a047d3f858845221046a4bf0d6e2c"} Nov 30 08:04:28 crc kubenswrapper[4941]: I1130 08:04:28.861092 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gxz8w" event={"ID":"92d378e1-3a85-473a-bcc8-e74c780db6ad","Type":"ContainerStarted","Data":"3122f1d1d11269bcc40af665fad2944790c4d6f3c03761b37210bb7ea1a04b96"} Nov 30 08:04:28 crc kubenswrapper[4941]: I1130 08:04:28.881935 4941 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-gxz8w" podStartSLOduration=1.881900242 podStartE2EDuration="1.881900242s" podCreationTimestamp="2025-11-30 08:04:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:04:28.874812014 +0000 UTC m=+4689.642983623" watchObservedRunningTime="2025-11-30 08:04:28.881900242 +0000 UTC m=+4689.650071861" Nov 30 08:04:29 crc kubenswrapper[4941]: I1130 08:04:29.875173 4941 generic.go:334] "Generic (PLEG): container finished" podID="7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3" containerID="7d3b5f5301cc1537df9edacec0ffd3adc15f300539709971c6f121a8c78d4e0e" exitCode=0 Nov 30 08:04:29 crc kubenswrapper[4941]: I1130 08:04:29.875276 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7c0e-account-create-update-tjpdc" event={"ID":"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3","Type":"ContainerDied","Data":"7d3b5f5301cc1537df9edacec0ffd3adc15f300539709971c6f121a8c78d4e0e"} Nov 30 08:04:29 crc kubenswrapper[4941]: I1130 08:04:29.877509 4941 generic.go:334] "Generic (PLEG): container finished" podID="92d378e1-3a85-473a-bcc8-e74c780db6ad" containerID="9441cbaf0a0fe0fee05416b01ac3ec9b9cf7226a62599facb991873adb9baf8a" exitCode=0 Nov 30 08:04:29 crc kubenswrapper[4941]: I1130 08:04:29.877598 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gxz8w" event={"ID":"92d378e1-3a85-473a-bcc8-e74c780db6ad","Type":"ContainerDied","Data":"9441cbaf0a0fe0fee05416b01ac3ec9b9cf7226a62599facb991873adb9baf8a"} Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.367304 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gxz8w" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.374862 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7c0e-account-create-update-tjpdc" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.455478 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3-operator-scripts\") pod \"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3\" (UID: \"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3\") " Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.455606 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84jp2\" (UniqueName: \"kubernetes.io/projected/92d378e1-3a85-473a-bcc8-e74c780db6ad-kube-api-access-84jp2\") pod \"92d378e1-3a85-473a-bcc8-e74c780db6ad\" (UID: \"92d378e1-3a85-473a-bcc8-e74c780db6ad\") " Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.455645 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz2gg\" (UniqueName: \"kubernetes.io/projected/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3-kube-api-access-wz2gg\") pod \"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3\" (UID: \"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3\") " Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.455783 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92d378e1-3a85-473a-bcc8-e74c780db6ad-operator-scripts\") pod \"92d378e1-3a85-473a-bcc8-e74c780db6ad\" (UID: \"92d378e1-3a85-473a-bcc8-e74c780db6ad\") " Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.457153 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92d378e1-3a85-473a-bcc8-e74c780db6ad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "92d378e1-3a85-473a-bcc8-e74c780db6ad" (UID: "92d378e1-3a85-473a-bcc8-e74c780db6ad"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.457776 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3" (UID: "7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.463886 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3-kube-api-access-wz2gg" (OuterVolumeSpecName: "kube-api-access-wz2gg") pod "7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3" (UID: "7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3"). InnerVolumeSpecName "kube-api-access-wz2gg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.464280 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92d378e1-3a85-473a-bcc8-e74c780db6ad-kube-api-access-84jp2" (OuterVolumeSpecName: "kube-api-access-84jp2") pod "92d378e1-3a85-473a-bcc8-e74c780db6ad" (UID: "92d378e1-3a85-473a-bcc8-e74c780db6ad"). InnerVolumeSpecName "kube-api-access-84jp2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.558161 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.558193 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84jp2\" (UniqueName: \"kubernetes.io/projected/92d378e1-3a85-473a-bcc8-e74c780db6ad-kube-api-access-84jp2\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.558207 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz2gg\" (UniqueName: \"kubernetes.io/projected/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3-kube-api-access-wz2gg\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.558219 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92d378e1-3a85-473a-bcc8-e74c780db6ad-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.897915 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7c0e-account-create-update-tjpdc" event={"ID":"7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3","Type":"ContainerDied","Data":"9988083b9e3b91182f751a6a59eb4e81487a047d3f858845221046a4bf0d6e2c"} Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.897950 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7c0e-account-create-update-tjpdc" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.897962 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9988083b9e3b91182f751a6a59eb4e81487a047d3f858845221046a4bf0d6e2c" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.899485 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gxz8w" event={"ID":"92d378e1-3a85-473a-bcc8-e74c780db6ad","Type":"ContainerDied","Data":"3122f1d1d11269bcc40af665fad2944790c4d6f3c03761b37210bb7ea1a04b96"} Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.899550 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3122f1d1d11269bcc40af665fad2944790c4d6f3c03761b37210bb7ea1a04b96" Nov 30 08:04:31 crc kubenswrapper[4941]: I1130 08:04:31.899564 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-gxz8w" Nov 30 08:04:32 crc kubenswrapper[4941]: I1130 08:04:32.522782 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:04:32 crc kubenswrapper[4941]: E1130 08:04:32.523310 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:04:32 crc kubenswrapper[4941]: I1130 08:04:32.990483 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-v4vzc"] Nov 30 08:04:32 crc kubenswrapper[4941]: E1130 08:04:32.990899 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92d378e1-3a85-473a-bcc8-e74c780db6ad" containerName="mariadb-database-create" Nov 30 08:04:32 crc kubenswrapper[4941]: I1130 08:04:32.990922 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="92d378e1-3a85-473a-bcc8-e74c780db6ad" containerName="mariadb-database-create" Nov 30 08:04:32 crc kubenswrapper[4941]: E1130 08:04:32.990947 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3" containerName="mariadb-account-create-update" Nov 30 08:04:32 crc kubenswrapper[4941]: I1130 08:04:32.990956 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3" containerName="mariadb-account-create-update" Nov 30 08:04:32 crc kubenswrapper[4941]: I1130 08:04:32.991183 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="92d378e1-3a85-473a-bcc8-e74c780db6ad" containerName="mariadb-database-create" Nov 30 08:04:32 crc kubenswrapper[4941]: I1130 08:04:32.991208 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3" containerName="mariadb-account-create-update" Nov 30 08:04:32 crc kubenswrapper[4941]: I1130 08:04:32.992031 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:32 crc kubenswrapper[4941]: I1130 08:04:32.994741 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 30 08:04:32 crc kubenswrapper[4941]: I1130 08:04:32.996221 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-s5lcl" Nov 30 08:04:32 crc kubenswrapper[4941]: I1130 08:04:32.996598 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.001201 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.005779 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-v4vzc"] Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.121699 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h6r5\" (UniqueName: \"kubernetes.io/projected/9eddae14-af30-4521-a774-42753828eb43-kube-api-access-4h6r5\") pod \"keystone-db-sync-v4vzc\" (UID: \"9eddae14-af30-4521-a774-42753828eb43\") " pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.122015 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eddae14-af30-4521-a774-42753828eb43-config-data\") pod \"keystone-db-sync-v4vzc\" (UID: \"9eddae14-af30-4521-a774-42753828eb43\") " pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.122168 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eddae14-af30-4521-a774-42753828eb43-combined-ca-bundle\") pod \"keystone-db-sync-v4vzc\" (UID: \"9eddae14-af30-4521-a774-42753828eb43\") " pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.223511 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eddae14-af30-4521-a774-42753828eb43-combined-ca-bundle\") pod \"keystone-db-sync-v4vzc\" (UID: \"9eddae14-af30-4521-a774-42753828eb43\") " pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.223915 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h6r5\" (UniqueName: \"kubernetes.io/projected/9eddae14-af30-4521-a774-42753828eb43-kube-api-access-4h6r5\") pod \"keystone-db-sync-v4vzc\" (UID: \"9eddae14-af30-4521-a774-42753828eb43\") " pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.224000 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eddae14-af30-4521-a774-42753828eb43-config-data\") pod \"keystone-db-sync-v4vzc\" (UID: \"9eddae14-af30-4521-a774-42753828eb43\") " pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.228148 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eddae14-af30-4521-a774-42753828eb43-combined-ca-bundle\") pod \"keystone-db-sync-v4vzc\" (UID: \"9eddae14-af30-4521-a774-42753828eb43\") " 
pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.232256 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eddae14-af30-4521-a774-42753828eb43-config-data\") pod \"keystone-db-sync-v4vzc\" (UID: \"9eddae14-af30-4521-a774-42753828eb43\") " pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.258511 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h6r5\" (UniqueName: \"kubernetes.io/projected/9eddae14-af30-4521-a774-42753828eb43-kube-api-access-4h6r5\") pod \"keystone-db-sync-v4vzc\" (UID: \"9eddae14-af30-4521-a774-42753828eb43\") " pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.312679 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.765913 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-v4vzc"] Nov 30 08:04:33 crc kubenswrapper[4941]: I1130 08:04:33.918213 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v4vzc" event={"ID":"9eddae14-af30-4521-a774-42753828eb43","Type":"ContainerStarted","Data":"18f5549b7a569a93fe6922ff8e421b665efe3a1ccb7f7758dcbb37e6d5c18342"} Nov 30 08:04:37 crc kubenswrapper[4941]: I1130 08:04:37.197806 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Nov 30 08:04:39 crc kubenswrapper[4941]: I1130 08:04:39.984572 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v4vzc" event={"ID":"9eddae14-af30-4521-a774-42753828eb43","Type":"ContainerStarted","Data":"121941d06a3111ca355fab13cefa45b71dab8d050c04c6ee313e241825e01acb"} Nov 30 08:04:40 crc kubenswrapper[4941]: I1130 08:04:40.021688 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-v4vzc" podStartSLOduration=2.924074768 podStartE2EDuration="8.021661084s" podCreationTimestamp="2025-11-30 08:04:32 +0000 UTC" firstStartedPulling="2025-11-30 08:04:33.775652389 +0000 UTC m=+4694.543823998" lastFinishedPulling="2025-11-30 08:04:38.873238715 +0000 UTC m=+4699.641410314" observedRunningTime="2025-11-30 08:04:40.010155218 +0000 UTC m=+4700.778326857" watchObservedRunningTime="2025-11-30 08:04:40.021661084 +0000 UTC m=+4700.789832703" Nov 30 08:04:40 crc kubenswrapper[4941]: I1130 08:04:40.874449 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mt6mn"] Nov 30 08:04:40 crc kubenswrapper[4941]: I1130 08:04:40.877717 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:40 crc kubenswrapper[4941]: I1130 08:04:40.884548 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mt6mn"] Nov 30 08:04:40 crc kubenswrapper[4941]: I1130 08:04:40.981502 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a471ac9f-67d0-4955-8a08-4ee168a296b1-catalog-content\") pod \"redhat-marketplace-mt6mn\" (UID: \"a471ac9f-67d0-4955-8a08-4ee168a296b1\") " pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:40 crc kubenswrapper[4941]: I1130 08:04:40.982099 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a471ac9f-67d0-4955-8a08-4ee168a296b1-utilities\") pod \"redhat-marketplace-mt6mn\" (UID: \"a471ac9f-67d0-4955-8a08-4ee168a296b1\") " pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:40 crc kubenswrapper[4941]: I1130 08:04:40.982287 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7whcf\" (UniqueName: \"kubernetes.io/projected/a471ac9f-67d0-4955-8a08-4ee168a296b1-kube-api-access-7whcf\") pod \"redhat-marketplace-mt6mn\" (UID: \"a471ac9f-67d0-4955-8a08-4ee168a296b1\") " pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:41 crc kubenswrapper[4941]: I1130 08:04:41.012644 4941 generic.go:334] "Generic (PLEG): container finished" podID="9eddae14-af30-4521-a774-42753828eb43" containerID="121941d06a3111ca355fab13cefa45b71dab8d050c04c6ee313e241825e01acb" exitCode=0 Nov 30 08:04:41 crc kubenswrapper[4941]: I1130 08:04:41.012924 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v4vzc" event={"ID":"9eddae14-af30-4521-a774-42753828eb43","Type":"ContainerDied","Data":"121941d06a3111ca355fab13cefa45b71dab8d050c04c6ee313e241825e01acb"} Nov 30 08:04:41 crc kubenswrapper[4941]: I1130 08:04:41.084585 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a471ac9f-67d0-4955-8a08-4ee168a296b1-utilities\") pod \"redhat-marketplace-mt6mn\" (UID: \"a471ac9f-67d0-4955-8a08-4ee168a296b1\") " pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:41 crc kubenswrapper[4941]: I1130 08:04:41.084655 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7whcf\" (UniqueName: \"kubernetes.io/projected/a471ac9f-67d0-4955-8a08-4ee168a296b1-kube-api-access-7whcf\") pod \"redhat-marketplace-mt6mn\" (UID: \"a471ac9f-67d0-4955-8a08-4ee168a296b1\") " pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:41 crc kubenswrapper[4941]: I1130 08:04:41.084786 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a471ac9f-67d0-4955-8a08-4ee168a296b1-catalog-content\") pod \"redhat-marketplace-mt6mn\" (UID: \"a471ac9f-67d0-4955-8a08-4ee168a296b1\") " pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:41 crc kubenswrapper[4941]: I1130 08:04:41.085503 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a471ac9f-67d0-4955-8a08-4ee168a296b1-catalog-content\") pod \"redhat-marketplace-mt6mn\" (UID: 
\"a471ac9f-67d0-4955-8a08-4ee168a296b1\") " pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:41 crc kubenswrapper[4941]: I1130 08:04:41.086060 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a471ac9f-67d0-4955-8a08-4ee168a296b1-utilities\") pod \"redhat-marketplace-mt6mn\" (UID: \"a471ac9f-67d0-4955-8a08-4ee168a296b1\") " pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:41 crc kubenswrapper[4941]: I1130 08:04:41.115206 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7whcf\" (UniqueName: \"kubernetes.io/projected/a471ac9f-67d0-4955-8a08-4ee168a296b1-kube-api-access-7whcf\") pod \"redhat-marketplace-mt6mn\" (UID: \"a471ac9f-67d0-4955-8a08-4ee168a296b1\") " pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:41 crc kubenswrapper[4941]: I1130 08:04:41.212721 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:41 crc kubenswrapper[4941]: I1130 08:04:41.763474 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mt6mn"] Nov 30 08:04:42 crc kubenswrapper[4941]: I1130 08:04:42.027090 4941 generic.go:334] "Generic (PLEG): container finished" podID="a471ac9f-67d0-4955-8a08-4ee168a296b1" containerID="3f56d7737d9b970dbbfd919347859c355ed4cec4047410db645168343eb375fa" exitCode=0 Nov 30 08:04:42 crc kubenswrapper[4941]: I1130 08:04:42.027217 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mt6mn" event={"ID":"a471ac9f-67d0-4955-8a08-4ee168a296b1","Type":"ContainerDied","Data":"3f56d7737d9b970dbbfd919347859c355ed4cec4047410db645168343eb375fa"} Nov 30 08:04:42 crc kubenswrapper[4941]: I1130 08:04:42.027585 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mt6mn" event={"ID":"a471ac9f-67d0-4955-8a08-4ee168a296b1","Type":"ContainerStarted","Data":"7fbe9aac4fe110646244b618d8f984d418d676efd8c3a540b09d5f7bbdee8ded"} Nov 30 08:04:42 crc kubenswrapper[4941]: I1130 08:04:42.977690 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:43 crc kubenswrapper[4941]: I1130 08:04:43.023029 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h6r5\" (UniqueName: \"kubernetes.io/projected/9eddae14-af30-4521-a774-42753828eb43-kube-api-access-4h6r5\") pod \"9eddae14-af30-4521-a774-42753828eb43\" (UID: \"9eddae14-af30-4521-a774-42753828eb43\") " Nov 30 08:04:43 crc kubenswrapper[4941]: I1130 08:04:43.023079 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eddae14-af30-4521-a774-42753828eb43-combined-ca-bundle\") pod \"9eddae14-af30-4521-a774-42753828eb43\" (UID: \"9eddae14-af30-4521-a774-42753828eb43\") " Nov 30 08:04:43 crc kubenswrapper[4941]: I1130 08:04:43.024335 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eddae14-af30-4521-a774-42753828eb43-config-data\") pod \"9eddae14-af30-4521-a774-42753828eb43\" (UID: \"9eddae14-af30-4521-a774-42753828eb43\") " Nov 30 08:04:43 crc kubenswrapper[4941]: I1130 08:04:43.037786 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9eddae14-af30-4521-a774-42753828eb43-kube-api-access-4h6r5" (OuterVolumeSpecName: "kube-api-access-4h6r5") pod "9eddae14-af30-4521-a774-42753828eb43" (UID: "9eddae14-af30-4521-a774-42753828eb43"). InnerVolumeSpecName "kube-api-access-4h6r5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:04:43 crc kubenswrapper[4941]: I1130 08:04:43.058451 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v4vzc" event={"ID":"9eddae14-af30-4521-a774-42753828eb43","Type":"ContainerDied","Data":"18f5549b7a569a93fe6922ff8e421b665efe3a1ccb7f7758dcbb37e6d5c18342"} Nov 30 08:04:43 crc kubenswrapper[4941]: I1130 08:04:43.058495 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18f5549b7a569a93fe6922ff8e421b665efe3a1ccb7f7758dcbb37e6d5c18342" Nov 30 08:04:43 crc kubenswrapper[4941]: I1130 08:04:43.058553 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-v4vzc" Nov 30 08:04:43 crc kubenswrapper[4941]: I1130 08:04:43.059436 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9eddae14-af30-4521-a774-42753828eb43-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9eddae14-af30-4521-a774-42753828eb43" (UID: "9eddae14-af30-4521-a774-42753828eb43"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:04:43 crc kubenswrapper[4941]: I1130 08:04:43.076762 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9eddae14-af30-4521-a774-42753828eb43-config-data" (OuterVolumeSpecName: "config-data") pod "9eddae14-af30-4521-a774-42753828eb43" (UID: "9eddae14-af30-4521-a774-42753828eb43"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:04:43 crc kubenswrapper[4941]: I1130 08:04:43.126290 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h6r5\" (UniqueName: \"kubernetes.io/projected/9eddae14-af30-4521-a774-42753828eb43-kube-api-access-4h6r5\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:43 crc kubenswrapper[4941]: I1130 08:04:43.126338 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eddae14-af30-4521-a774-42753828eb43-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:43 crc kubenswrapper[4941]: I1130 08:04:43.126349 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eddae14-af30-4521-a774-42753828eb43-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.081200 4941 generic.go:334] "Generic (PLEG): container finished" podID="a471ac9f-67d0-4955-8a08-4ee168a296b1" containerID="4954bab91d1a54f5496d066b38ec5bf5ad2ead56202fc7f0fab4e0c4817c5f61" exitCode=0 Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.081398 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mt6mn" event={"ID":"a471ac9f-67d0-4955-8a08-4ee168a296b1","Type":"ContainerDied","Data":"4954bab91d1a54f5496d066b38ec5bf5ad2ead56202fc7f0fab4e0c4817c5f61"} Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.288296 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6dccc884f9-srsgd"] Nov 30 08:04:44 crc kubenswrapper[4941]: E1130 08:04:44.288916 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eddae14-af30-4521-a774-42753828eb43" containerName="keystone-db-sync" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.288941 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eddae14-af30-4521-a774-42753828eb43" containerName="keystone-db-sync" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.289222 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="9eddae14-af30-4521-a774-42753828eb43" containerName="keystone-db-sync" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.290534 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.296834 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-6wdv4"] Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.299042 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.312557 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.312610 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.317896 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.319890 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-s5lcl" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.320521 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.331022 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dccc884f9-srsgd"] Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.380750 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-config-data\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.380801 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-credential-keys\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.380890 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-dns-svc\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.380941 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnxqt\" (UniqueName: \"kubernetes.io/projected/ab13ff3b-cfda-414b-83bf-580651d02bd4-kube-api-access-gnxqt\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.395600 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsmbr\" (UniqueName: \"kubernetes.io/projected/1bafa4dd-edc7-43f6-85ac-646b83d95381-kube-api-access-tsmbr\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.395851 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-ovsdbserver-nb\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.395925 4941 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-combined-ca-bundle\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.396060 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-config\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.396176 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-scripts\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.396268 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-fernet-keys\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.396449 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-ovsdbserver-sb\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.401389 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-6wdv4"] Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.498040 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnxqt\" (UniqueName: \"kubernetes.io/projected/ab13ff3b-cfda-414b-83bf-580651d02bd4-kube-api-access-gnxqt\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.498135 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsmbr\" (UniqueName: \"kubernetes.io/projected/1bafa4dd-edc7-43f6-85ac-646b83d95381-kube-api-access-tsmbr\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.498170 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-ovsdbserver-nb\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.498190 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-combined-ca-bundle\") pod \"keystone-bootstrap-6wdv4\" 
(UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.498217 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-config\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.498245 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-scripts\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.498265 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-fernet-keys\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.498291 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-ovsdbserver-sb\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.498337 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-config-data\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.498352 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-credential-keys\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.498381 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-dns-svc\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.499292 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-dns-svc\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.503066 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-config\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.503213 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-ovsdbserver-nb\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.503305 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-ovsdbserver-sb\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.505444 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-scripts\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.506172 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-combined-ca-bundle\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.506951 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-credential-keys\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.507904 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-config-data\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.525417 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:04:44 crc kubenswrapper[4941]: E1130 08:04:44.525735 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.527747 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsmbr\" (UniqueName: \"kubernetes.io/projected/1bafa4dd-edc7-43f6-85ac-646b83d95381-kube-api-access-tsmbr\") pod \"dnsmasq-dns-6dccc884f9-srsgd\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.538981 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-fernet-keys\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " 
pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.549770 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnxqt\" (UniqueName: \"kubernetes.io/projected/ab13ff3b-cfda-414b-83bf-580651d02bd4-kube-api-access-gnxqt\") pod \"keystone-bootstrap-6wdv4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.615192 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:44 crc kubenswrapper[4941]: I1130 08:04:44.622480 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:45 crc kubenswrapper[4941]: I1130 08:04:45.104515 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mt6mn" event={"ID":"a471ac9f-67d0-4955-8a08-4ee168a296b1","Type":"ContainerStarted","Data":"e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39"} Nov 30 08:04:45 crc kubenswrapper[4941]: I1130 08:04:45.131194 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mt6mn" podStartSLOduration=2.472041185 podStartE2EDuration="5.131174389s" podCreationTimestamp="2025-11-30 08:04:40 +0000 UTC" firstStartedPulling="2025-11-30 08:04:42.030541682 +0000 UTC m=+4702.798713301" lastFinishedPulling="2025-11-30 08:04:44.689674886 +0000 UTC m=+4705.457846505" observedRunningTime="2025-11-30 08:04:45.125299817 +0000 UTC m=+4705.893471436" watchObservedRunningTime="2025-11-30 08:04:45.131174389 +0000 UTC m=+4705.899345998" Nov 30 08:04:45 crc kubenswrapper[4941]: I1130 08:04:45.160071 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-6wdv4"] Nov 30 08:04:45 crc kubenswrapper[4941]: I1130 08:04:45.166610 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dccc884f9-srsgd"] Nov 30 08:04:46 crc kubenswrapper[4941]: I1130 08:04:46.115349 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6wdv4" event={"ID":"ab13ff3b-cfda-414b-83bf-580651d02bd4","Type":"ContainerStarted","Data":"bcd2d404f5852c37d654708a967b39897fc3ad4f1129d7cf67687e683be92388"} Nov 30 08:04:46 crc kubenswrapper[4941]: I1130 08:04:46.116149 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6wdv4" event={"ID":"ab13ff3b-cfda-414b-83bf-580651d02bd4","Type":"ContainerStarted","Data":"8613932e0a456c8a220deffa9a61b74940a070ba710a53f625e7e330205a0fac"} Nov 30 08:04:46 crc kubenswrapper[4941]: I1130 08:04:46.119776 4941 generic.go:334] "Generic (PLEG): container finished" podID="1bafa4dd-edc7-43f6-85ac-646b83d95381" containerID="41edd3a5b7bc464b30e67106fa871e6c001f8dc8bc342f996011302904f4b47c" exitCode=0 Nov 30 08:04:46 crc kubenswrapper[4941]: I1130 08:04:46.119918 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" event={"ID":"1bafa4dd-edc7-43f6-85ac-646b83d95381","Type":"ContainerDied","Data":"41edd3a5b7bc464b30e67106fa871e6c001f8dc8bc342f996011302904f4b47c"} Nov 30 08:04:46 crc kubenswrapper[4941]: I1130 08:04:46.119945 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" 
event={"ID":"1bafa4dd-edc7-43f6-85ac-646b83d95381","Type":"ContainerStarted","Data":"b7793cd9f7d0c70ffc9e1ebc0ad68cf3df11c404c8b607b7fa4f2fe81122fb62"} Nov 30 08:04:46 crc kubenswrapper[4941]: I1130 08:04:46.146152 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-6wdv4" podStartSLOduration=2.1461282329999998 podStartE2EDuration="2.146128233s" podCreationTimestamp="2025-11-30 08:04:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:04:46.141578662 +0000 UTC m=+4706.909750271" watchObservedRunningTime="2025-11-30 08:04:46.146128233 +0000 UTC m=+4706.914299842" Nov 30 08:04:47 crc kubenswrapper[4941]: I1130 08:04:47.135681 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" event={"ID":"1bafa4dd-edc7-43f6-85ac-646b83d95381","Type":"ContainerStarted","Data":"dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6"} Nov 30 08:04:47 crc kubenswrapper[4941]: I1130 08:04:47.136259 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:47 crc kubenswrapper[4941]: I1130 08:04:47.168859 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" podStartSLOduration=3.168836617 podStartE2EDuration="3.168836617s" podCreationTimestamp="2025-11-30 08:04:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:04:47.168399513 +0000 UTC m=+4707.936571152" watchObservedRunningTime="2025-11-30 08:04:47.168836617 +0000 UTC m=+4707.937008226" Nov 30 08:04:49 crc kubenswrapper[4941]: I1130 08:04:49.164265 4941 generic.go:334] "Generic (PLEG): container finished" podID="ab13ff3b-cfda-414b-83bf-580651d02bd4" containerID="bcd2d404f5852c37d654708a967b39897fc3ad4f1129d7cf67687e683be92388" exitCode=0 Nov 30 08:04:49 crc kubenswrapper[4941]: I1130 08:04:49.164411 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6wdv4" event={"ID":"ab13ff3b-cfda-414b-83bf-580651d02bd4","Type":"ContainerDied","Data":"bcd2d404f5852c37d654708a967b39897fc3ad4f1129d7cf67687e683be92388"} Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.503447 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.656958 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnxqt\" (UniqueName: \"kubernetes.io/projected/ab13ff3b-cfda-414b-83bf-580651d02bd4-kube-api-access-gnxqt\") pod \"ab13ff3b-cfda-414b-83bf-580651d02bd4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.657089 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-credential-keys\") pod \"ab13ff3b-cfda-414b-83bf-580651d02bd4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.657138 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-config-data\") pod \"ab13ff3b-cfda-414b-83bf-580651d02bd4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.657177 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-fernet-keys\") pod \"ab13ff3b-cfda-414b-83bf-580651d02bd4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.657222 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-scripts\") pod \"ab13ff3b-cfda-414b-83bf-580651d02bd4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.657267 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-combined-ca-bundle\") pod \"ab13ff3b-cfda-414b-83bf-580651d02bd4\" (UID: \"ab13ff3b-cfda-414b-83bf-580651d02bd4\") " Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.664489 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ab13ff3b-cfda-414b-83bf-580651d02bd4" (UID: "ab13ff3b-cfda-414b-83bf-580651d02bd4"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.674793 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-scripts" (OuterVolumeSpecName: "scripts") pod "ab13ff3b-cfda-414b-83bf-580651d02bd4" (UID: "ab13ff3b-cfda-414b-83bf-580651d02bd4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.674854 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab13ff3b-cfda-414b-83bf-580651d02bd4-kube-api-access-gnxqt" (OuterVolumeSpecName: "kube-api-access-gnxqt") pod "ab13ff3b-cfda-414b-83bf-580651d02bd4" (UID: "ab13ff3b-cfda-414b-83bf-580651d02bd4"). InnerVolumeSpecName "kube-api-access-gnxqt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.678525 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ab13ff3b-cfda-414b-83bf-580651d02bd4" (UID: "ab13ff3b-cfda-414b-83bf-580651d02bd4"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.684162 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab13ff3b-cfda-414b-83bf-580651d02bd4" (UID: "ab13ff3b-cfda-414b-83bf-580651d02bd4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.687658 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-config-data" (OuterVolumeSpecName: "config-data") pod "ab13ff3b-cfda-414b-83bf-580651d02bd4" (UID: "ab13ff3b-cfda-414b-83bf-580651d02bd4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.759494 4941 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.759529 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.759541 4941 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.759551 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.759561 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab13ff3b-cfda-414b-83bf-580651d02bd4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:50 crc kubenswrapper[4941]: I1130 08:04:50.759569 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnxqt\" (UniqueName: \"kubernetes.io/projected/ab13ff3b-cfda-414b-83bf-580651d02bd4-kube-api-access-gnxqt\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.189408 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6wdv4" event={"ID":"ab13ff3b-cfda-414b-83bf-580651d02bd4","Type":"ContainerDied","Data":"8613932e0a456c8a220deffa9a61b74940a070ba710a53f625e7e330205a0fac"} Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.189444 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-6wdv4" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.189471 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8613932e0a456c8a220deffa9a61b74940a070ba710a53f625e7e330205a0fac" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.212945 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.213043 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.309356 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-6wdv4"] Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.320922 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-6wdv4"] Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.325007 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.381015 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-s68xn"] Nov 30 08:04:51 crc kubenswrapper[4941]: E1130 08:04:51.381771 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab13ff3b-cfda-414b-83bf-580651d02bd4" containerName="keystone-bootstrap" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.381789 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab13ff3b-cfda-414b-83bf-580651d02bd4" containerName="keystone-bootstrap" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.381943 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab13ff3b-cfda-414b-83bf-580651d02bd4" containerName="keystone-bootstrap" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.382580 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.384791 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.384989 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-s5lcl" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.385411 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.385491 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.385881 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.405941 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-s68xn"] Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.475180 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-fernet-keys\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.475252 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-credential-keys\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.475325 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jg59s\" (UniqueName: \"kubernetes.io/projected/a7977ff2-8f69-4b0a-9985-653796e2cd9f-kube-api-access-jg59s\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.475395 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-combined-ca-bundle\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.475433 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-config-data\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.475561 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-scripts\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.532462 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="ab13ff3b-cfda-414b-83bf-580651d02bd4" path="/var/lib/kubelet/pods/ab13ff3b-cfda-414b-83bf-580651d02bd4/volumes" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.576925 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-credential-keys\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.577015 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jg59s\" (UniqueName: \"kubernetes.io/projected/a7977ff2-8f69-4b0a-9985-653796e2cd9f-kube-api-access-jg59s\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.577130 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-combined-ca-bundle\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.577171 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-config-data\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.577262 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-scripts\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.577366 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-fernet-keys\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.583045 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-fernet-keys\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.583743 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-scripts\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.586576 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-config-data\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.587738 4941 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-credential-keys\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.588410 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-combined-ca-bundle\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.598912 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jg59s\" (UniqueName: \"kubernetes.io/projected/a7977ff2-8f69-4b0a-9985-653796e2cd9f-kube-api-access-jg59s\") pod \"keystone-bootstrap-s68xn\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:51 crc kubenswrapper[4941]: I1130 08:04:51.700547 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:52 crc kubenswrapper[4941]: I1130 08:04:52.258828 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:52 crc kubenswrapper[4941]: I1130 08:04:52.278527 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-s68xn"] Nov 30 08:04:52 crc kubenswrapper[4941]: I1130 08:04:52.346270 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mt6mn"] Nov 30 08:04:53 crc kubenswrapper[4941]: I1130 08:04:53.213750 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-s68xn" event={"ID":"a7977ff2-8f69-4b0a-9985-653796e2cd9f","Type":"ContainerStarted","Data":"90e0fc2554a575e6716cdbb1cbf0424a809888eb5cd020263b299cfcb93d5a9d"} Nov 30 08:04:53 crc kubenswrapper[4941]: I1130 08:04:53.214188 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-s68xn" event={"ID":"a7977ff2-8f69-4b0a-9985-653796e2cd9f","Type":"ContainerStarted","Data":"633b8372fca4f88c60a9a21ccb389cb5ed190e5304ddd8db4868d172d168a2d1"} Nov 30 08:04:53 crc kubenswrapper[4941]: I1130 08:04:53.248175 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-s68xn" podStartSLOduration=2.24814988 podStartE2EDuration="2.24814988s" podCreationTimestamp="2025-11-30 08:04:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:04:53.240511264 +0000 UTC m=+4714.008682923" watchObservedRunningTime="2025-11-30 08:04:53.24814988 +0000 UTC m=+4714.016321489" Nov 30 08:04:54 crc kubenswrapper[4941]: I1130 08:04:54.225583 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mt6mn" podUID="a471ac9f-67d0-4955-8a08-4ee168a296b1" containerName="registry-server" containerID="cri-o://e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39" gracePeriod=2 Nov 30 08:04:54 crc kubenswrapper[4941]: I1130 08:04:54.621510 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:04:54 crc kubenswrapper[4941]: I1130 
08:04:54.701832 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6957fb7fd9-j9sc9"] Nov 30 08:04:54 crc kubenswrapper[4941]: I1130 08:04:54.703447 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" podUID="e8d96cc0-5e98-48af-878c-bbb6df1ab926" containerName="dnsmasq-dns" containerID="cri-o://bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf" gracePeriod=10 Nov 30 08:04:54 crc kubenswrapper[4941]: I1130 08:04:54.966750 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.111570 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a471ac9f-67d0-4955-8a08-4ee168a296b1-catalog-content\") pod \"a471ac9f-67d0-4955-8a08-4ee168a296b1\" (UID: \"a471ac9f-67d0-4955-8a08-4ee168a296b1\") " Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.112046 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a471ac9f-67d0-4955-8a08-4ee168a296b1-utilities\") pod \"a471ac9f-67d0-4955-8a08-4ee168a296b1\" (UID: \"a471ac9f-67d0-4955-8a08-4ee168a296b1\") " Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.112227 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7whcf\" (UniqueName: \"kubernetes.io/projected/a471ac9f-67d0-4955-8a08-4ee168a296b1-kube-api-access-7whcf\") pod \"a471ac9f-67d0-4955-8a08-4ee168a296b1\" (UID: \"a471ac9f-67d0-4955-8a08-4ee168a296b1\") " Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.113478 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a471ac9f-67d0-4955-8a08-4ee168a296b1-utilities" (OuterVolumeSpecName: "utilities") pod "a471ac9f-67d0-4955-8a08-4ee168a296b1" (UID: "a471ac9f-67d0-4955-8a08-4ee168a296b1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.118919 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a471ac9f-67d0-4955-8a08-4ee168a296b1-kube-api-access-7whcf" (OuterVolumeSpecName: "kube-api-access-7whcf") pod "a471ac9f-67d0-4955-8a08-4ee168a296b1" (UID: "a471ac9f-67d0-4955-8a08-4ee168a296b1"). InnerVolumeSpecName "kube-api-access-7whcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.140130 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a471ac9f-67d0-4955-8a08-4ee168a296b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a471ac9f-67d0-4955-8a08-4ee168a296b1" (UID: "a471ac9f-67d0-4955-8a08-4ee168a296b1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.214904 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a471ac9f-67d0-4955-8a08-4ee168a296b1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.214938 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a471ac9f-67d0-4955-8a08-4ee168a296b1-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.214948 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7whcf\" (UniqueName: \"kubernetes.io/projected/a471ac9f-67d0-4955-8a08-4ee168a296b1-kube-api-access-7whcf\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.222366 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.239473 4941 generic.go:334] "Generic (PLEG): container finished" podID="e8d96cc0-5e98-48af-878c-bbb6df1ab926" containerID="bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf" exitCode=0 Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.240468 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.241105 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" event={"ID":"e8d96cc0-5e98-48af-878c-bbb6df1ab926","Type":"ContainerDied","Data":"bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf"} Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.241146 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6957fb7fd9-j9sc9" event={"ID":"e8d96cc0-5e98-48af-878c-bbb6df1ab926","Type":"ContainerDied","Data":"b0aa2d98a7c8b5ee62ca6a0230865b832eb4d82c9243684a350c320fad97736c"} Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.241168 4941 scope.go:117] "RemoveContainer" containerID="bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.251121 4941 generic.go:334] "Generic (PLEG): container finished" podID="a471ac9f-67d0-4955-8a08-4ee168a296b1" containerID="e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39" exitCode=0 Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.251170 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mt6mn" event={"ID":"a471ac9f-67d0-4955-8a08-4ee168a296b1","Type":"ContainerDied","Data":"e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39"} Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.251216 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mt6mn" event={"ID":"a471ac9f-67d0-4955-8a08-4ee168a296b1","Type":"ContainerDied","Data":"7fbe9aac4fe110646244b618d8f984d418d676efd8c3a540b09d5f7bbdee8ded"} Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.251223 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mt6mn" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.314348 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mt6mn"] Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.317491 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-config\") pod \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.317652 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-ovsdbserver-sb\") pod \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.317715 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-ovsdbserver-nb\") pod \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.317760 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-dns-svc\") pod \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.317890 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfjjg\" (UniqueName: \"kubernetes.io/projected/e8d96cc0-5e98-48af-878c-bbb6df1ab926-kube-api-access-gfjjg\") pod \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\" (UID: \"e8d96cc0-5e98-48af-878c-bbb6df1ab926\") " Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.321888 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mt6mn"] Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.321975 4941 scope.go:117] "RemoveContainer" containerID="0310d52f7c06d36a6696afa85b4b9f2ed03fe4c9447fc40c18ef1ce6916cc6e4" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.394730 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8d96cc0-5e98-48af-878c-bbb6df1ab926-kube-api-access-gfjjg" (OuterVolumeSpecName: "kube-api-access-gfjjg") pod "e8d96cc0-5e98-48af-878c-bbb6df1ab926" (UID: "e8d96cc0-5e98-48af-878c-bbb6df1ab926"). InnerVolumeSpecName "kube-api-access-gfjjg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.409676 4941 scope.go:117] "RemoveContainer" containerID="bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf" Nov 30 08:04:55 crc kubenswrapper[4941]: E1130 08:04:55.410248 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf\": container with ID starting with bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf not found: ID does not exist" containerID="bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.410289 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf"} err="failed to get container status \"bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf\": rpc error: code = NotFound desc = could not find container \"bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf\": container with ID starting with bcb9f1001c2b4d26e7c68194dd3290240118d6e6710e191a2de5dee16dbea0bf not found: ID does not exist" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.410316 4941 scope.go:117] "RemoveContainer" containerID="0310d52f7c06d36a6696afa85b4b9f2ed03fe4c9447fc40c18ef1ce6916cc6e4" Nov 30 08:04:55 crc kubenswrapper[4941]: E1130 08:04:55.410866 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0310d52f7c06d36a6696afa85b4b9f2ed03fe4c9447fc40c18ef1ce6916cc6e4\": container with ID starting with 0310d52f7c06d36a6696afa85b4b9f2ed03fe4c9447fc40c18ef1ce6916cc6e4 not found: ID does not exist" containerID="0310d52f7c06d36a6696afa85b4b9f2ed03fe4c9447fc40c18ef1ce6916cc6e4" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.410920 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0310d52f7c06d36a6696afa85b4b9f2ed03fe4c9447fc40c18ef1ce6916cc6e4"} err="failed to get container status \"0310d52f7c06d36a6696afa85b4b9f2ed03fe4c9447fc40c18ef1ce6916cc6e4\": rpc error: code = NotFound desc = could not find container \"0310d52f7c06d36a6696afa85b4b9f2ed03fe4c9447fc40c18ef1ce6916cc6e4\": container with ID starting with 0310d52f7c06d36a6696afa85b4b9f2ed03fe4c9447fc40c18ef1ce6916cc6e4 not found: ID does not exist" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.410957 4941 scope.go:117] "RemoveContainer" containerID="e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.413896 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e8d96cc0-5e98-48af-878c-bbb6df1ab926" (UID: "e8d96cc0-5e98-48af-878c-bbb6df1ab926"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.418051 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e8d96cc0-5e98-48af-878c-bbb6df1ab926" (UID: "e8d96cc0-5e98-48af-878c-bbb6df1ab926"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.419969 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.420006 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.420020 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfjjg\" (UniqueName: \"kubernetes.io/projected/e8d96cc0-5e98-48af-878c-bbb6df1ab926-kube-api-access-gfjjg\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.420643 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-config" (OuterVolumeSpecName: "config") pod "e8d96cc0-5e98-48af-878c-bbb6df1ab926" (UID: "e8d96cc0-5e98-48af-878c-bbb6df1ab926"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.429662 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e8d96cc0-5e98-48af-878c-bbb6df1ab926" (UID: "e8d96cc0-5e98-48af-878c-bbb6df1ab926"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.499499 4941 scope.go:117] "RemoveContainer" containerID="4954bab91d1a54f5496d066b38ec5bf5ad2ead56202fc7f0fab4e0c4817c5f61" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.523351 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.523391 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8d96cc0-5e98-48af-878c-bbb6df1ab926-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.537362 4941 scope.go:117] "RemoveContainer" containerID="3f56d7737d9b970dbbfd919347859c355ed4cec4047410db645168343eb375fa" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.550261 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a471ac9f-67d0-4955-8a08-4ee168a296b1" path="/var/lib/kubelet/pods/a471ac9f-67d0-4955-8a08-4ee168a296b1/volumes" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.563374 4941 scope.go:117] "RemoveContainer" containerID="e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39" Nov 30 08:04:55 crc kubenswrapper[4941]: E1130 08:04:55.563936 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39\": container with ID starting with e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39 not found: ID does not exist" containerID="e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.564000 
4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39"} err="failed to get container status \"e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39\": rpc error: code = NotFound desc = could not find container \"e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39\": container with ID starting with e964e7b9bb7034977d635fba4c3491cae5a08038d98ca35be7029e829abc3b39 not found: ID does not exist" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.564041 4941 scope.go:117] "RemoveContainer" containerID="4954bab91d1a54f5496d066b38ec5bf5ad2ead56202fc7f0fab4e0c4817c5f61" Nov 30 08:04:55 crc kubenswrapper[4941]: E1130 08:04:55.564412 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4954bab91d1a54f5496d066b38ec5bf5ad2ead56202fc7f0fab4e0c4817c5f61\": container with ID starting with 4954bab91d1a54f5496d066b38ec5bf5ad2ead56202fc7f0fab4e0c4817c5f61 not found: ID does not exist" containerID="4954bab91d1a54f5496d066b38ec5bf5ad2ead56202fc7f0fab4e0c4817c5f61" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.564451 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4954bab91d1a54f5496d066b38ec5bf5ad2ead56202fc7f0fab4e0c4817c5f61"} err="failed to get container status \"4954bab91d1a54f5496d066b38ec5bf5ad2ead56202fc7f0fab4e0c4817c5f61\": rpc error: code = NotFound desc = could not find container \"4954bab91d1a54f5496d066b38ec5bf5ad2ead56202fc7f0fab4e0c4817c5f61\": container with ID starting with 4954bab91d1a54f5496d066b38ec5bf5ad2ead56202fc7f0fab4e0c4817c5f61 not found: ID does not exist" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.564478 4941 scope.go:117] "RemoveContainer" containerID="3f56d7737d9b970dbbfd919347859c355ed4cec4047410db645168343eb375fa" Nov 30 08:04:55 crc kubenswrapper[4941]: E1130 08:04:55.564970 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f56d7737d9b970dbbfd919347859c355ed4cec4047410db645168343eb375fa\": container with ID starting with 3f56d7737d9b970dbbfd919347859c355ed4cec4047410db645168343eb375fa not found: ID does not exist" containerID="3f56d7737d9b970dbbfd919347859c355ed4cec4047410db645168343eb375fa" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.564994 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f56d7737d9b970dbbfd919347859c355ed4cec4047410db645168343eb375fa"} err="failed to get container status \"3f56d7737d9b970dbbfd919347859c355ed4cec4047410db645168343eb375fa\": rpc error: code = NotFound desc = could not find container \"3f56d7737d9b970dbbfd919347859c355ed4cec4047410db645168343eb375fa\": container with ID starting with 3f56d7737d9b970dbbfd919347859c355ed4cec4047410db645168343eb375fa not found: ID does not exist" Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.622472 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6957fb7fd9-j9sc9"] Nov 30 08:04:55 crc kubenswrapper[4941]: I1130 08:04:55.632692 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6957fb7fd9-j9sc9"] Nov 30 08:04:56 crc kubenswrapper[4941]: I1130 08:04:56.272145 4941 generic.go:334] "Generic (PLEG): container finished" podID="a7977ff2-8f69-4b0a-9985-653796e2cd9f" 
containerID="90e0fc2554a575e6716cdbb1cbf0424a809888eb5cd020263b299cfcb93d5a9d" exitCode=0 Nov 30 08:04:56 crc kubenswrapper[4941]: I1130 08:04:56.272236 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-s68xn" event={"ID":"a7977ff2-8f69-4b0a-9985-653796e2cd9f","Type":"ContainerDied","Data":"90e0fc2554a575e6716cdbb1cbf0424a809888eb5cd020263b299cfcb93d5a9d"} Nov 30 08:04:56 crc kubenswrapper[4941]: I1130 08:04:56.522029 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:04:56 crc kubenswrapper[4941]: E1130 08:04:56.522361 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.533772 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8d96cc0-5e98-48af-878c-bbb6df1ab926" path="/var/lib/kubelet/pods/e8d96cc0-5e98-48af-878c-bbb6df1ab926/volumes" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.630756 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.767977 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-credential-keys\") pod \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.768055 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-combined-ca-bundle\") pod \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.768120 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-scripts\") pod \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.768151 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jg59s\" (UniqueName: \"kubernetes.io/projected/a7977ff2-8f69-4b0a-9985-653796e2cd9f-kube-api-access-jg59s\") pod \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.773319 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-config-data\") pod \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.774483 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-fernet-keys\") pod 
\"a7977ff2-8f69-4b0a-9985-653796e2cd9f\" (UID: \"a7977ff2-8f69-4b0a-9985-653796e2cd9f\") " Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.776021 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7977ff2-8f69-4b0a-9985-653796e2cd9f-kube-api-access-jg59s" (OuterVolumeSpecName: "kube-api-access-jg59s") pod "a7977ff2-8f69-4b0a-9985-653796e2cd9f" (UID: "a7977ff2-8f69-4b0a-9985-653796e2cd9f"). InnerVolumeSpecName "kube-api-access-jg59s". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.776098 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "a7977ff2-8f69-4b0a-9985-653796e2cd9f" (UID: "a7977ff2-8f69-4b0a-9985-653796e2cd9f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.777298 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-scripts" (OuterVolumeSpecName: "scripts") pod "a7977ff2-8f69-4b0a-9985-653796e2cd9f" (UID: "a7977ff2-8f69-4b0a-9985-653796e2cd9f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.777421 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "a7977ff2-8f69-4b0a-9985-653796e2cd9f" (UID: "a7977ff2-8f69-4b0a-9985-653796e2cd9f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.878460 4941 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.879027 4941 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.879211 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.879460 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jg59s\" (UniqueName: \"kubernetes.io/projected/a7977ff2-8f69-4b0a-9985-653796e2cd9f-kube-api-access-jg59s\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.891480 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-config-data" (OuterVolumeSpecName: "config-data") pod "a7977ff2-8f69-4b0a-9985-653796e2cd9f" (UID: "a7977ff2-8f69-4b0a-9985-653796e2cd9f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.892842 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7977ff2-8f69-4b0a-9985-653796e2cd9f" (UID: "a7977ff2-8f69-4b0a-9985-653796e2cd9f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.981864 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:57 crc kubenswrapper[4941]: I1130 08:04:57.982359 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7977ff2-8f69-4b0a-9985-653796e2cd9f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.301297 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-s68xn" event={"ID":"a7977ff2-8f69-4b0a-9985-653796e2cd9f","Type":"ContainerDied","Data":"633b8372fca4f88c60a9a21ccb389cb5ed190e5304ddd8db4868d172d168a2d1"} Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.301373 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="633b8372fca4f88c60a9a21ccb389cb5ed190e5304ddd8db4868d172d168a2d1" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.301440 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-s68xn" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.392309 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-76dc6df48-fpk8r"] Nov 30 08:04:58 crc kubenswrapper[4941]: E1130 08:04:58.392755 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8d96cc0-5e98-48af-878c-bbb6df1ab926" containerName="init" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.392776 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8d96cc0-5e98-48af-878c-bbb6df1ab926" containerName="init" Nov 30 08:04:58 crc kubenswrapper[4941]: E1130 08:04:58.392793 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a471ac9f-67d0-4955-8a08-4ee168a296b1" containerName="registry-server" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.392800 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a471ac9f-67d0-4955-8a08-4ee168a296b1" containerName="registry-server" Nov 30 08:04:58 crc kubenswrapper[4941]: E1130 08:04:58.392814 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a471ac9f-67d0-4955-8a08-4ee168a296b1" containerName="extract-utilities" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.392822 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a471ac9f-67d0-4955-8a08-4ee168a296b1" containerName="extract-utilities" Nov 30 08:04:58 crc kubenswrapper[4941]: E1130 08:04:58.392838 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8d96cc0-5e98-48af-878c-bbb6df1ab926" containerName="dnsmasq-dns" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.392844 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8d96cc0-5e98-48af-878c-bbb6df1ab926" containerName="dnsmasq-dns" Nov 30 08:04:58 crc kubenswrapper[4941]: E1130 08:04:58.392866 4941 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="a7977ff2-8f69-4b0a-9985-653796e2cd9f" containerName="keystone-bootstrap" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.392872 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7977ff2-8f69-4b0a-9985-653796e2cd9f" containerName="keystone-bootstrap" Nov 30 08:04:58 crc kubenswrapper[4941]: E1130 08:04:58.392882 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a471ac9f-67d0-4955-8a08-4ee168a296b1" containerName="extract-content" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.392887 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a471ac9f-67d0-4955-8a08-4ee168a296b1" containerName="extract-content" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.393063 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a471ac9f-67d0-4955-8a08-4ee168a296b1" containerName="registry-server" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.393079 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8d96cc0-5e98-48af-878c-bbb6df1ab926" containerName="dnsmasq-dns" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.393098 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7977ff2-8f69-4b0a-9985-653796e2cd9f" containerName="keystone-bootstrap" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.393767 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.398560 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-s5lcl" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.398830 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.398860 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.421382 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-76dc6df48-fpk8r"] Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.441000 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.490813 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-config-data\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.490874 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-fernet-keys\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.490974 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4wvf\" (UniqueName: \"kubernetes.io/projected/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-kube-api-access-g4wvf\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: 
I1130 08:04:58.490997 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-credential-keys\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.491017 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-combined-ca-bundle\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.491035 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-scripts\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.594363 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-config-data\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.594464 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-fernet-keys\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.594756 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4wvf\" (UniqueName: \"kubernetes.io/projected/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-kube-api-access-g4wvf\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.594793 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-credential-keys\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.594851 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-combined-ca-bundle\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.594888 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-scripts\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.602065 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-combined-ca-bundle\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.602161 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-fernet-keys\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.602206 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-config-data\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.602236 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-credential-keys\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.602785 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-scripts\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.614912 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4wvf\" (UniqueName: \"kubernetes.io/projected/9a6f721e-1e8e-4f88-a383-4d5bb29a4c27-kube-api-access-g4wvf\") pod \"keystone-76dc6df48-fpk8r\" (UID: \"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27\") " pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:58 crc kubenswrapper[4941]: I1130 08:04:58.715667 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:04:59 crc kubenswrapper[4941]: I1130 08:04:59.157865 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-76dc6df48-fpk8r"] Nov 30 08:04:59 crc kubenswrapper[4941]: W1130 08:04:59.162730 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a6f721e_1e8e_4f88_a383_4d5bb29a4c27.slice/crio-d8606942a58bed1241a2dc22e0f5f22c51dc50f47b66fd7f657a0874aa6d1588 WatchSource:0}: Error finding container d8606942a58bed1241a2dc22e0f5f22c51dc50f47b66fd7f657a0874aa6d1588: Status 404 returned error can't find the container with id d8606942a58bed1241a2dc22e0f5f22c51dc50f47b66fd7f657a0874aa6d1588 Nov 30 08:04:59 crc kubenswrapper[4941]: I1130 08:04:59.313907 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-76dc6df48-fpk8r" event={"ID":"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27","Type":"ContainerStarted","Data":"d8606942a58bed1241a2dc22e0f5f22c51dc50f47b66fd7f657a0874aa6d1588"} Nov 30 08:05:00 crc kubenswrapper[4941]: I1130 08:05:00.324143 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-76dc6df48-fpk8r" event={"ID":"9a6f721e-1e8e-4f88-a383-4d5bb29a4c27","Type":"ContainerStarted","Data":"e60be4e5b581c617b0d60cfad331755fbbfd329c70e5d2c2ad722b660776d671"} Nov 30 08:05:00 crc kubenswrapper[4941]: I1130 08:05:00.324599 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:05:00 crc kubenswrapper[4941]: I1130 08:05:00.346520 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-76dc6df48-fpk8r" podStartSLOduration=2.346501654 podStartE2EDuration="2.346501654s" podCreationTimestamp="2025-11-30 08:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:05:00.344009587 +0000 UTC m=+4721.112181216" watchObservedRunningTime="2025-11-30 08:05:00.346501654 +0000 UTC m=+4721.114673263" Nov 30 08:05:10 crc kubenswrapper[4941]: I1130 08:05:10.521268 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:05:10 crc kubenswrapper[4941]: E1130 08:05:10.522292 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:05:23 crc kubenswrapper[4941]: I1130 08:05:23.523843 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:05:23 crc kubenswrapper[4941]: E1130 08:05:23.525580 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:05:30 crc kubenswrapper[4941]: I1130 08:05:30.289904 4941 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-76dc6df48-fpk8r" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.359036 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.363723 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.372100 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.372262 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.372593 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-qwmrm" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.382665 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3b21409-ff37-4290-98a5-6658af4fd06d-openstack-config\") pod \"openstackclient\" (UID: \"e3b21409-ff37-4290-98a5-6658af4fd06d\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.382834 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e3b21409-ff37-4290-98a5-6658af4fd06d-openstack-config-secret\") pod \"openstackclient\" (UID: \"e3b21409-ff37-4290-98a5-6658af4fd06d\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.382905 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6f8f9\" (UniqueName: \"kubernetes.io/projected/e3b21409-ff37-4290-98a5-6658af4fd06d-kube-api-access-6f8f9\") pod \"openstackclient\" (UID: \"e3b21409-ff37-4290-98a5-6658af4fd06d\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.386746 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.419655 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Nov 30 08:05:32 crc kubenswrapper[4941]: E1130 08:05:32.422463 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-6f8f9 openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/openstackclient" podUID="e3b21409-ff37-4290-98a5-6658af4fd06d" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.435427 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.444288 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.451606 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.480990 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.492457 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3b21409-ff37-4290-98a5-6658af4fd06d-openstack-config\") pod \"openstackclient\" (UID: \"e3b21409-ff37-4290-98a5-6658af4fd06d\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.492798 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e3b21409-ff37-4290-98a5-6658af4fd06d-openstack-config-secret\") pod \"openstackclient\" (UID: \"e3b21409-ff37-4290-98a5-6658af4fd06d\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.492896 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6f8f9\" (UniqueName: \"kubernetes.io/projected/e3b21409-ff37-4290-98a5-6658af4fd06d-kube-api-access-6f8f9\") pod \"openstackclient\" (UID: \"e3b21409-ff37-4290-98a5-6658af4fd06d\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.493486 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3b21409-ff37-4290-98a5-6658af4fd06d-openstack-config\") pod \"openstackclient\" (UID: \"e3b21409-ff37-4290-98a5-6658af4fd06d\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: E1130 08:05:32.498729 4941 projected.go:194] Error preparing data for projected volume kube-api-access-6f8f9 for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (e3b21409-ff37-4290-98a5-6658af4fd06d) does not match the UID in record. The object might have been deleted and then recreated Nov 30 08:05:32 crc kubenswrapper[4941]: E1130 08:05:32.499431 4941 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e3b21409-ff37-4290-98a5-6658af4fd06d-kube-api-access-6f8f9 podName:e3b21409-ff37-4290-98a5-6658af4fd06d nodeName:}" failed. No retries permitted until 2025-11-30 08:05:32.999402128 +0000 UTC m=+4753.767573727 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-6f8f9" (UniqueName: "kubernetes.io/projected/e3b21409-ff37-4290-98a5-6658af4fd06d-kube-api-access-6f8f9") pod "openstackclient" (UID: "e3b21409-ff37-4290-98a5-6658af4fd06d") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (e3b21409-ff37-4290-98a5-6658af4fd06d) does not match the UID in record. 
The object might have been deleted and then recreated Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.525950 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e3b21409-ff37-4290-98a5-6658af4fd06d-openstack-config-secret\") pod \"openstackclient\" (UID: \"e3b21409-ff37-4290-98a5-6658af4fd06d\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.595185 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrtd8\" (UniqueName: \"kubernetes.io/projected/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-kube-api-access-lrtd8\") pod \"openstackclient\" (UID: \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.595319 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-openstack-config\") pod \"openstackclient\" (UID: \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.595490 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-openstack-config-secret\") pod \"openstackclient\" (UID: \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.697996 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrtd8\" (UniqueName: \"kubernetes.io/projected/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-kube-api-access-lrtd8\") pod \"openstackclient\" (UID: \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.698129 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-openstack-config\") pod \"openstackclient\" (UID: \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.699630 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-openstack-config\") pod \"openstackclient\" (UID: \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.701527 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-openstack-config-secret\") pod \"openstackclient\" (UID: \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.705914 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-openstack-config-secret\") pod \"openstackclient\" (UID: \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.712663 4941 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.717436 4941 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="e3b21409-ff37-4290-98a5-6658af4fd06d" podUID="7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.720052 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrtd8\" (UniqueName: \"kubernetes.io/projected/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-kube-api-access-lrtd8\") pod \"openstackclient\" (UID: \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\") " pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.792427 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.793041 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.802445 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e3b21409-ff37-4290-98a5-6658af4fd06d-openstack-config-secret\") pod \"e3b21409-ff37-4290-98a5-6658af4fd06d\" (UID: \"e3b21409-ff37-4290-98a5-6658af4fd06d\") " Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.802508 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3b21409-ff37-4290-98a5-6658af4fd06d-openstack-config\") pod \"e3b21409-ff37-4290-98a5-6658af4fd06d\" (UID: \"e3b21409-ff37-4290-98a5-6658af4fd06d\") " Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.802822 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6f8f9\" (UniqueName: \"kubernetes.io/projected/e3b21409-ff37-4290-98a5-6658af4fd06d-kube-api-access-6f8f9\") on node \"crc\" DevicePath \"\"" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.803225 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3b21409-ff37-4290-98a5-6658af4fd06d-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "e3b21409-ff37-4290-98a5-6658af4fd06d" (UID: "e3b21409-ff37-4290-98a5-6658af4fd06d"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.808917 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3b21409-ff37-4290-98a5-6658af4fd06d-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "e3b21409-ff37-4290-98a5-6658af4fd06d" (UID: "e3b21409-ff37-4290-98a5-6658af4fd06d"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.904505 4941 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3b21409-ff37-4290-98a5-6658af4fd06d-openstack-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:05:32 crc kubenswrapper[4941]: I1130 08:05:32.904549 4941 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e3b21409-ff37-4290-98a5-6658af4fd06d-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Nov 30 08:05:33 crc kubenswrapper[4941]: I1130 08:05:33.292984 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 30 08:05:33 crc kubenswrapper[4941]: I1130 08:05:33.541892 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3b21409-ff37-4290-98a5-6658af4fd06d" path="/var/lib/kubelet/pods/e3b21409-ff37-4290-98a5-6658af4fd06d/volumes" Nov 30 08:05:33 crc kubenswrapper[4941]: I1130 08:05:33.734668 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb","Type":"ContainerStarted","Data":"f3168a3259280afe110633475094e477eb546202d51b3efdb6ceb3985f12c196"} Nov 30 08:05:33 crc kubenswrapper[4941]: I1130 08:05:33.734745 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 30 08:05:33 crc kubenswrapper[4941]: I1130 08:05:33.752872 4941 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="e3b21409-ff37-4290-98a5-6658af4fd06d" podUID="7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" Nov 30 08:05:34 crc kubenswrapper[4941]: I1130 08:05:34.522167 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:05:34 crc kubenswrapper[4941]: E1130 08:05:34.522495 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:05:40 crc kubenswrapper[4941]: I1130 08:05:40.626537 4941 scope.go:117] "RemoveContainer" containerID="774a7ed7dbbc564b94d634d088218c81bd0bd1a86c37224c35ce648fa296d05a" Nov 30 08:05:43 crc kubenswrapper[4941]: I1130 08:05:43.818494 4941 scope.go:117] "RemoveContainer" containerID="c7318f9d47e418dea615a8e2df0a409993737a1bca433947f932c592c23bb4ae" Nov 30 08:05:43 crc kubenswrapper[4941]: I1130 08:05:43.868101 4941 scope.go:117] "RemoveContainer" containerID="5a55f8a0ca4152be72d920d5bc3c6ea5afe516745ef8a460bc1662bab6bb0195" Nov 30 08:05:44 crc kubenswrapper[4941]: I1130 08:05:44.860978 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb","Type":"ContainerStarted","Data":"22dc93670680ebf076a6f69c890f212a609262e12833cf19eaa55540c36212ed"} Nov 30 08:05:44 crc kubenswrapper[4941]: I1130 08:05:44.890751 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.287404721 podStartE2EDuration="12.890725066s" podCreationTimestamp="2025-11-30 08:05:32 
+0000 UTC" firstStartedPulling="2025-11-30 08:05:33.304938571 +0000 UTC m=+4754.073110180" lastFinishedPulling="2025-11-30 08:05:43.908258916 +0000 UTC m=+4764.676430525" observedRunningTime="2025-11-30 08:05:44.888370473 +0000 UTC m=+4765.656542122" watchObservedRunningTime="2025-11-30 08:05:44.890725066 +0000 UTC m=+4765.658896685" Nov 30 08:05:47 crc kubenswrapper[4941]: I1130 08:05:47.522424 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:05:47 crc kubenswrapper[4941]: E1130 08:05:47.523413 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:06:02 crc kubenswrapper[4941]: I1130 08:06:02.522999 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:06:02 crc kubenswrapper[4941]: E1130 08:06:02.523938 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:06:15 crc kubenswrapper[4941]: I1130 08:06:15.531619 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:06:15 crc kubenswrapper[4941]: E1130 08:06:15.532753 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:06:26 crc kubenswrapper[4941]: I1130 08:06:26.522440 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:06:26 crc kubenswrapper[4941]: E1130 08:06:26.523413 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:06:37 crc kubenswrapper[4941]: I1130 08:06:37.521807 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:06:37 crc kubenswrapper[4941]: E1130 08:06:37.522803 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:06:43 crc kubenswrapper[4941]: I1130 08:06:43.996529 4941 scope.go:117] "RemoveContainer" containerID="3b5417c20d1f573fa9266030863c68c6c5ba70c7b0eddf3c488dc9c96c6cc436" Nov 30 08:06:52 crc kubenswrapper[4941]: I1130 08:06:52.521519 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:06:52 crc kubenswrapper[4941]: E1130 08:06:52.522769 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:07:06 crc kubenswrapper[4941]: I1130 08:07:06.521867 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:07:06 crc kubenswrapper[4941]: E1130 08:07:06.522856 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.469211 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-zkgfv"] Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.473201 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-zkgfv" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.479968 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-5d2d-account-create-update-kdc28"] Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.482055 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-5d2d-account-create-update-kdc28" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.484949 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.488955 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-5d2d-account-create-update-kdc28"] Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.497678 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-zkgfv"] Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.567223 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e736e32c-0e4a-4150-adbc-3fe99e2f48c8-operator-scripts\") pod \"barbican-5d2d-account-create-update-kdc28\" (UID: \"e736e32c-0e4a-4150-adbc-3fe99e2f48c8\") " pod="openstack/barbican-5d2d-account-create-update-kdc28" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.567282 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kz4bt\" (UniqueName: \"kubernetes.io/projected/e736e32c-0e4a-4150-adbc-3fe99e2f48c8-kube-api-access-kz4bt\") pod \"barbican-5d2d-account-create-update-kdc28\" (UID: \"e736e32c-0e4a-4150-adbc-3fe99e2f48c8\") " pod="openstack/barbican-5d2d-account-create-update-kdc28" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.567319 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbdct\" (UniqueName: \"kubernetes.io/projected/49a676f8-dd5f-4912-b268-9ccc3b7fe108-kube-api-access-fbdct\") pod \"barbican-db-create-zkgfv\" (UID: \"49a676f8-dd5f-4912-b268-9ccc3b7fe108\") " pod="openstack/barbican-db-create-zkgfv" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.567363 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49a676f8-dd5f-4912-b268-9ccc3b7fe108-operator-scripts\") pod \"barbican-db-create-zkgfv\" (UID: \"49a676f8-dd5f-4912-b268-9ccc3b7fe108\") " pod="openstack/barbican-db-create-zkgfv" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.668689 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e736e32c-0e4a-4150-adbc-3fe99e2f48c8-operator-scripts\") pod \"barbican-5d2d-account-create-update-kdc28\" (UID: \"e736e32c-0e4a-4150-adbc-3fe99e2f48c8\") " pod="openstack/barbican-5d2d-account-create-update-kdc28" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.668738 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kz4bt\" (UniqueName: \"kubernetes.io/projected/e736e32c-0e4a-4150-adbc-3fe99e2f48c8-kube-api-access-kz4bt\") pod \"barbican-5d2d-account-create-update-kdc28\" (UID: \"e736e32c-0e4a-4150-adbc-3fe99e2f48c8\") " pod="openstack/barbican-5d2d-account-create-update-kdc28" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.668787 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbdct\" (UniqueName: \"kubernetes.io/projected/49a676f8-dd5f-4912-b268-9ccc3b7fe108-kube-api-access-fbdct\") pod \"barbican-db-create-zkgfv\" (UID: \"49a676f8-dd5f-4912-b268-9ccc3b7fe108\") " pod="openstack/barbican-db-create-zkgfv" Nov 30 08:07:15 crc 
kubenswrapper[4941]: I1130 08:07:15.668832 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49a676f8-dd5f-4912-b268-9ccc3b7fe108-operator-scripts\") pod \"barbican-db-create-zkgfv\" (UID: \"49a676f8-dd5f-4912-b268-9ccc3b7fe108\") " pod="openstack/barbican-db-create-zkgfv" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.669960 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e736e32c-0e4a-4150-adbc-3fe99e2f48c8-operator-scripts\") pod \"barbican-5d2d-account-create-update-kdc28\" (UID: \"e736e32c-0e4a-4150-adbc-3fe99e2f48c8\") " pod="openstack/barbican-5d2d-account-create-update-kdc28" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.670190 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49a676f8-dd5f-4912-b268-9ccc3b7fe108-operator-scripts\") pod \"barbican-db-create-zkgfv\" (UID: \"49a676f8-dd5f-4912-b268-9ccc3b7fe108\") " pod="openstack/barbican-db-create-zkgfv" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.692878 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbdct\" (UniqueName: \"kubernetes.io/projected/49a676f8-dd5f-4912-b268-9ccc3b7fe108-kube-api-access-fbdct\") pod \"barbican-db-create-zkgfv\" (UID: \"49a676f8-dd5f-4912-b268-9ccc3b7fe108\") " pod="openstack/barbican-db-create-zkgfv" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.698754 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kz4bt\" (UniqueName: \"kubernetes.io/projected/e736e32c-0e4a-4150-adbc-3fe99e2f48c8-kube-api-access-kz4bt\") pod \"barbican-5d2d-account-create-update-kdc28\" (UID: \"e736e32c-0e4a-4150-adbc-3fe99e2f48c8\") " pod="openstack/barbican-5d2d-account-create-update-kdc28" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.804885 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-zkgfv" Nov 30 08:07:15 crc kubenswrapper[4941]: I1130 08:07:15.814191 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-5d2d-account-create-update-kdc28" Nov 30 08:07:16 crc kubenswrapper[4941]: I1130 08:07:16.093369 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-5d2d-account-create-update-kdc28"] Nov 30 08:07:16 crc kubenswrapper[4941]: I1130 08:07:16.127889 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-zkgfv"] Nov 30 08:07:16 crc kubenswrapper[4941]: I1130 08:07:16.972377 4941 generic.go:334] "Generic (PLEG): container finished" podID="49a676f8-dd5f-4912-b268-9ccc3b7fe108" containerID="17b8ba9ec1ec33645e56fc950b3c6b218cbddda3670a6763b3afe810c9b56def" exitCode=0 Nov 30 08:07:16 crc kubenswrapper[4941]: I1130 08:07:16.972500 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-zkgfv" event={"ID":"49a676f8-dd5f-4912-b268-9ccc3b7fe108","Type":"ContainerDied","Data":"17b8ba9ec1ec33645e56fc950b3c6b218cbddda3670a6763b3afe810c9b56def"} Nov 30 08:07:16 crc kubenswrapper[4941]: I1130 08:07:16.974133 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-zkgfv" event={"ID":"49a676f8-dd5f-4912-b268-9ccc3b7fe108","Type":"ContainerStarted","Data":"2c1a191ab3b31b844a5931326cf3c19461d7ccabba8aae89ed0af0f126d1a06c"} Nov 30 08:07:16 crc kubenswrapper[4941]: I1130 08:07:16.977092 4941 generic.go:334] "Generic (PLEG): container finished" podID="e736e32c-0e4a-4150-adbc-3fe99e2f48c8" containerID="8e29a923d30838597ce1faa16476d52c498fadd04a61db675f0dcf2db3807df0" exitCode=0 Nov 30 08:07:16 crc kubenswrapper[4941]: I1130 08:07:16.977193 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5d2d-account-create-update-kdc28" event={"ID":"e736e32c-0e4a-4150-adbc-3fe99e2f48c8","Type":"ContainerDied","Data":"8e29a923d30838597ce1faa16476d52c498fadd04a61db675f0dcf2db3807df0"} Nov 30 08:07:16 crc kubenswrapper[4941]: I1130 08:07:16.977269 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5d2d-account-create-update-kdc28" event={"ID":"e736e32c-0e4a-4150-adbc-3fe99e2f48c8","Type":"ContainerStarted","Data":"6d450f41698c4d61a0ff23035fb550dfadb9d4ecb96f2fe1aa5ee03e05c65bc0"} Nov 30 08:07:18 crc kubenswrapper[4941]: I1130 08:07:18.895900 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-zkgfv" Nov 30 08:07:18 crc kubenswrapper[4941]: I1130 08:07:18.902275 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-5d2d-account-create-update-kdc28" Nov 30 08:07:18 crc kubenswrapper[4941]: I1130 08:07:18.945986 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49a676f8-dd5f-4912-b268-9ccc3b7fe108-operator-scripts\") pod \"49a676f8-dd5f-4912-b268-9ccc3b7fe108\" (UID: \"49a676f8-dd5f-4912-b268-9ccc3b7fe108\") " Nov 30 08:07:18 crc kubenswrapper[4941]: I1130 08:07:18.946055 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e736e32c-0e4a-4150-adbc-3fe99e2f48c8-operator-scripts\") pod \"e736e32c-0e4a-4150-adbc-3fe99e2f48c8\" (UID: \"e736e32c-0e4a-4150-adbc-3fe99e2f48c8\") " Nov 30 08:07:18 crc kubenswrapper[4941]: I1130 08:07:18.946255 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbdct\" (UniqueName: \"kubernetes.io/projected/49a676f8-dd5f-4912-b268-9ccc3b7fe108-kube-api-access-fbdct\") pod \"49a676f8-dd5f-4912-b268-9ccc3b7fe108\" (UID: \"49a676f8-dd5f-4912-b268-9ccc3b7fe108\") " Nov 30 08:07:18 crc kubenswrapper[4941]: I1130 08:07:18.946280 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kz4bt\" (UniqueName: \"kubernetes.io/projected/e736e32c-0e4a-4150-adbc-3fe99e2f48c8-kube-api-access-kz4bt\") pod \"e736e32c-0e4a-4150-adbc-3fe99e2f48c8\" (UID: \"e736e32c-0e4a-4150-adbc-3fe99e2f48c8\") " Nov 30 08:07:18 crc kubenswrapper[4941]: I1130 08:07:18.949547 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49a676f8-dd5f-4912-b268-9ccc3b7fe108-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "49a676f8-dd5f-4912-b268-9ccc3b7fe108" (UID: "49a676f8-dd5f-4912-b268-9ccc3b7fe108"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:07:18 crc kubenswrapper[4941]: I1130 08:07:18.952227 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e736e32c-0e4a-4150-adbc-3fe99e2f48c8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e736e32c-0e4a-4150-adbc-3fe99e2f48c8" (UID: "e736e32c-0e4a-4150-adbc-3fe99e2f48c8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:07:18 crc kubenswrapper[4941]: I1130 08:07:18.959646 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49a676f8-dd5f-4912-b268-9ccc3b7fe108-kube-api-access-fbdct" (OuterVolumeSpecName: "kube-api-access-fbdct") pod "49a676f8-dd5f-4912-b268-9ccc3b7fe108" (UID: "49a676f8-dd5f-4912-b268-9ccc3b7fe108"). InnerVolumeSpecName "kube-api-access-fbdct". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:07:18 crc kubenswrapper[4941]: I1130 08:07:18.970823 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e736e32c-0e4a-4150-adbc-3fe99e2f48c8-kube-api-access-kz4bt" (OuterVolumeSpecName: "kube-api-access-kz4bt") pod "e736e32c-0e4a-4150-adbc-3fe99e2f48c8" (UID: "e736e32c-0e4a-4150-adbc-3fe99e2f48c8"). InnerVolumeSpecName "kube-api-access-kz4bt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:07:19 crc kubenswrapper[4941]: I1130 08:07:19.005774 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5d2d-account-create-update-kdc28" event={"ID":"e736e32c-0e4a-4150-adbc-3fe99e2f48c8","Type":"ContainerDied","Data":"6d450f41698c4d61a0ff23035fb550dfadb9d4ecb96f2fe1aa5ee03e05c65bc0"} Nov 30 08:07:19 crc kubenswrapper[4941]: I1130 08:07:19.005860 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d450f41698c4d61a0ff23035fb550dfadb9d4ecb96f2fe1aa5ee03e05c65bc0" Nov 30 08:07:19 crc kubenswrapper[4941]: I1130 08:07:19.005801 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5d2d-account-create-update-kdc28" Nov 30 08:07:19 crc kubenswrapper[4941]: I1130 08:07:19.007987 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-zkgfv" Nov 30 08:07:19 crc kubenswrapper[4941]: I1130 08:07:19.007982 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-zkgfv" event={"ID":"49a676f8-dd5f-4912-b268-9ccc3b7fe108","Type":"ContainerDied","Data":"2c1a191ab3b31b844a5931326cf3c19461d7ccabba8aae89ed0af0f126d1a06c"} Nov 30 08:07:19 crc kubenswrapper[4941]: I1130 08:07:19.008163 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c1a191ab3b31b844a5931326cf3c19461d7ccabba8aae89ed0af0f126d1a06c" Nov 30 08:07:19 crc kubenswrapper[4941]: I1130 08:07:19.048515 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbdct\" (UniqueName: \"kubernetes.io/projected/49a676f8-dd5f-4912-b268-9ccc3b7fe108-kube-api-access-fbdct\") on node \"crc\" DevicePath \"\"" Nov 30 08:07:19 crc kubenswrapper[4941]: I1130 08:07:19.048554 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kz4bt\" (UniqueName: \"kubernetes.io/projected/e736e32c-0e4a-4150-adbc-3fe99e2f48c8-kube-api-access-kz4bt\") on node \"crc\" DevicePath \"\"" Nov 30 08:07:19 crc kubenswrapper[4941]: I1130 08:07:19.048565 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49a676f8-dd5f-4912-b268-9ccc3b7fe108-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:07:19 crc kubenswrapper[4941]: I1130 08:07:19.048576 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e736e32c-0e4a-4150-adbc-3fe99e2f48c8-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.522909 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:07:20 crc kubenswrapper[4941]: E1130 08:07:20.523884 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.856104 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-c8bnk"] Nov 30 08:07:20 crc kubenswrapper[4941]: E1130 08:07:20.856650 4941 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="49a676f8-dd5f-4912-b268-9ccc3b7fe108" containerName="mariadb-database-create" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.856678 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="49a676f8-dd5f-4912-b268-9ccc3b7fe108" containerName="mariadb-database-create" Nov 30 08:07:20 crc kubenswrapper[4941]: E1130 08:07:20.856739 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e736e32c-0e4a-4150-adbc-3fe99e2f48c8" containerName="mariadb-account-create-update" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.856753 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e736e32c-0e4a-4150-adbc-3fe99e2f48c8" containerName="mariadb-account-create-update" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.857065 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="49a676f8-dd5f-4912-b268-9ccc3b7fe108" containerName="mariadb-database-create" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.857107 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e736e32c-0e4a-4150-adbc-3fe99e2f48c8" containerName="mariadb-account-create-update" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.858301 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.861883 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.878130 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-cwtn8" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.884822 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-c8bnk"] Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.896963 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57nr4\" (UniqueName: \"kubernetes.io/projected/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-kube-api-access-57nr4\") pod \"barbican-db-sync-c8bnk\" (UID: \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\") " pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.897043 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-db-sync-config-data\") pod \"barbican-db-sync-c8bnk\" (UID: \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\") " pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.897116 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-combined-ca-bundle\") pod \"barbican-db-sync-c8bnk\" (UID: \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\") " pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.999504 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57nr4\" (UniqueName: \"kubernetes.io/projected/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-kube-api-access-57nr4\") pod \"barbican-db-sync-c8bnk\" (UID: \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\") " pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.999571 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-db-sync-config-data\") pod \"barbican-db-sync-c8bnk\" (UID: \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\") " pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:20 crc kubenswrapper[4941]: I1130 08:07:20.999633 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-combined-ca-bundle\") pod \"barbican-db-sync-c8bnk\" (UID: \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\") " pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:21 crc kubenswrapper[4941]: I1130 08:07:21.006889 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-combined-ca-bundle\") pod \"barbican-db-sync-c8bnk\" (UID: \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\") " pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:21 crc kubenswrapper[4941]: I1130 08:07:21.015960 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-db-sync-config-data\") pod \"barbican-db-sync-c8bnk\" (UID: \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\") " pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:21 crc kubenswrapper[4941]: I1130 08:07:21.016709 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57nr4\" (UniqueName: \"kubernetes.io/projected/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-kube-api-access-57nr4\") pod \"barbican-db-sync-c8bnk\" (UID: \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\") " pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:21 crc kubenswrapper[4941]: I1130 08:07:21.179810 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:21 crc kubenswrapper[4941]: I1130 08:07:21.780141 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-c8bnk"] Nov 30 08:07:22 crc kubenswrapper[4941]: I1130 08:07:22.037765 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-c8bnk" event={"ID":"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b","Type":"ContainerStarted","Data":"cf1a66f8bed5b63c6c89d97785f97893c2bb0e3c186c029545bfe08a3f22b960"} Nov 30 08:07:28 crc kubenswrapper[4941]: I1130 08:07:28.102489 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-c8bnk" event={"ID":"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b","Type":"ContainerStarted","Data":"75382cfed0f3e8ef6d3fc213cede13af9e7c3db980daaefeb9bbc7cb371f6935"} Nov 30 08:07:28 crc kubenswrapper[4941]: I1130 08:07:28.128684 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-c8bnk" podStartSLOduration=2.725397063 podStartE2EDuration="8.128656744s" podCreationTimestamp="2025-11-30 08:07:20 +0000 UTC" firstStartedPulling="2025-11-30 08:07:21.789633495 +0000 UTC m=+4862.557805104" lastFinishedPulling="2025-11-30 08:07:27.192893136 +0000 UTC m=+4867.961064785" observedRunningTime="2025-11-30 08:07:28.125654281 +0000 UTC m=+4868.893825950" watchObservedRunningTime="2025-11-30 08:07:28.128656744 +0000 UTC m=+4868.896828383" Nov 30 08:07:29 crc kubenswrapper[4941]: I1130 08:07:29.119186 4941 generic.go:334] "Generic (PLEG): container finished" podID="0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b" containerID="75382cfed0f3e8ef6d3fc213cede13af9e7c3db980daaefeb9bbc7cb371f6935" exitCode=0 Nov 30 08:07:29 crc kubenswrapper[4941]: I1130 08:07:29.119260 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-c8bnk" event={"ID":"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b","Type":"ContainerDied","Data":"75382cfed0f3e8ef6d3fc213cede13af9e7c3db980daaefeb9bbc7cb371f6935"} Nov 30 08:07:31 crc kubenswrapper[4941]: I1130 08:07:31.147037 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-c8bnk" event={"ID":"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b","Type":"ContainerDied","Data":"cf1a66f8bed5b63c6c89d97785f97893c2bb0e3c186c029545bfe08a3f22b960"} Nov 30 08:07:31 crc kubenswrapper[4941]: I1130 08:07:31.147508 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf1a66f8bed5b63c6c89d97785f97893c2bb0e3c186c029545bfe08a3f22b960" Nov 30 08:07:31 crc kubenswrapper[4941]: I1130 08:07:31.275372 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:31 crc kubenswrapper[4941]: I1130 08:07:31.415210 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-combined-ca-bundle\") pod \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\" (UID: \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\") " Nov 30 08:07:31 crc kubenswrapper[4941]: I1130 08:07:31.415280 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57nr4\" (UniqueName: \"kubernetes.io/projected/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-kube-api-access-57nr4\") pod \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\" (UID: \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\") " Nov 30 08:07:31 crc kubenswrapper[4941]: I1130 08:07:31.415314 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-db-sync-config-data\") pod \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\" (UID: \"0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b\") " Nov 30 08:07:31 crc kubenswrapper[4941]: I1130 08:07:31.422379 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b" (UID: "0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:07:31 crc kubenswrapper[4941]: I1130 08:07:31.423420 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-kube-api-access-57nr4" (OuterVolumeSpecName: "kube-api-access-57nr4") pod "0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b" (UID: "0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b"). InnerVolumeSpecName "kube-api-access-57nr4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:07:31 crc kubenswrapper[4941]: I1130 08:07:31.448715 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b" (UID: "0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:07:31 crc kubenswrapper[4941]: I1130 08:07:31.517028 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:07:31 crc kubenswrapper[4941]: I1130 08:07:31.517065 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57nr4\" (UniqueName: \"kubernetes.io/projected/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-kube-api-access-57nr4\") on node \"crc\" DevicePath \"\"" Nov 30 08:07:31 crc kubenswrapper[4941]: I1130 08:07:31.517078 4941 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.157014 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-c8bnk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.562100 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7f4878bc6c-wfqmk"] Nov 30 08:07:32 crc kubenswrapper[4941]: E1130 08:07:32.562999 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b" containerName="barbican-db-sync" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.563022 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b" containerName="barbican-db-sync" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.563451 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b" containerName="barbican-db-sync" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.564501 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.566647 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.566858 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-cwtn8" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.570945 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.573369 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7f4878bc6c-wfqmk"] Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.686296 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-756cddb678-ld46p"] Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.688513 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.696595 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.704527 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-756cddb678-ld46p"] Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.714809 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d94679d6f-b2bvf"] Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.718497 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.726761 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d94679d6f-b2bvf"] Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.769017 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5265e97f-b224-4441-99fe-716a0df577f6-combined-ca-bundle\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.769310 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92sgk\" (UniqueName: \"kubernetes.io/projected/5265e97f-b224-4441-99fe-716a0df577f6-kube-api-access-92sgk\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.769430 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5265e97f-b224-4441-99fe-716a0df577f6-config-data-custom\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.769599 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5265e97f-b224-4441-99fe-716a0df577f6-config-data\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.769687 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5265e97f-b224-4441-99fe-716a0df577f6-logs\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.786131 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-548cc55d7d-csbl8"] Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.791601 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-548cc55d7d-csbl8" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.794641 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.802173 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-548cc55d7d-csbl8"] Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.871662 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9cph\" (UniqueName: \"kubernetes.io/projected/25c34180-c904-4cfa-89d8-bf5f1d5320dd-kube-api-access-m9cph\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.871763 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1602b04-2515-4374-aa8a-04802f9cd569-config-data\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.871841 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-config\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.871917 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5265e97f-b224-4441-99fe-716a0df577f6-config-data\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.871967 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-ovsdbserver-sb\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.871990 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvf26\" (UniqueName: \"kubernetes.io/projected/a1602b04-2515-4374-aa8a-04802f9cd569-kube-api-access-tvf26\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.873034 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5265e97f-b224-4441-99fe-716a0df577f6-logs\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.873070 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1602b04-2515-4374-aa8a-04802f9cd569-combined-ca-bundle\") pod 
\"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.873088 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5265e97f-b224-4441-99fe-716a0df577f6-combined-ca-bundle\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.873108 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-ovsdbserver-nb\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.873127 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92sgk\" (UniqueName: \"kubernetes.io/projected/5265e97f-b224-4441-99fe-716a0df577f6-kube-api-access-92sgk\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.873148 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1602b04-2515-4374-aa8a-04802f9cd569-config-data-custom\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.873177 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1602b04-2515-4374-aa8a-04802f9cd569-logs\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.873195 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5265e97f-b224-4441-99fe-716a0df577f6-config-data-custom\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.873236 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-dns-svc\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.873584 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5265e97f-b224-4441-99fe-716a0df577f6-logs\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.877955 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5265e97f-b224-4441-99fe-716a0df577f6-combined-ca-bundle\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.879962 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5265e97f-b224-4441-99fe-716a0df577f6-config-data-custom\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.881364 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5265e97f-b224-4441-99fe-716a0df577f6-config-data\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.916958 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92sgk\" (UniqueName: \"kubernetes.io/projected/5265e97f-b224-4441-99fe-716a0df577f6-kube-api-access-92sgk\") pod \"barbican-worker-7f4878bc6c-wfqmk\" (UID: \"5265e97f-b224-4441-99fe-716a0df577f6\") " pod="openstack/barbican-worker-7f4878bc6c-wfqmk" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.975264 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-logs\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.975985 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1602b04-2515-4374-aa8a-04802f9cd569-logs\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.976108 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9pdv\" (UniqueName: \"kubernetes.io/projected/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-kube-api-access-n9pdv\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.976217 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-dns-svc\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.976306 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9cph\" (UniqueName: \"kubernetes.io/projected/25c34180-c904-4cfa-89d8-bf5f1d5320dd-kube-api-access-m9cph\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.976416 4941 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1602b04-2515-4374-aa8a-04802f9cd569-config-data\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.976472 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1602b04-2515-4374-aa8a-04802f9cd569-logs\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.976491 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-config\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.976672 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-config-data-custom\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.976777 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-ovsdbserver-sb\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.976822 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvf26\" (UniqueName: \"kubernetes.io/projected/a1602b04-2515-4374-aa8a-04802f9cd569-kube-api-access-tvf26\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.976883 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-combined-ca-bundle\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.976979 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1602b04-2515-4374-aa8a-04802f9cd569-combined-ca-bundle\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.977028 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-ovsdbserver-nb\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 
08:07:32.977068 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1602b04-2515-4374-aa8a-04802f9cd569-config-data-custom\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.977095 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-config-data\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.977462 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-dns-svc\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.977792 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-config\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.978192 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-ovsdbserver-sb\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.979647 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-ovsdbserver-nb\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.982118 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a1602b04-2515-4374-aa8a-04802f9cd569-config-data-custom\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.982327 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1602b04-2515-4374-aa8a-04802f9cd569-combined-ca-bundle\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.995200 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1602b04-2515-4374-aa8a-04802f9cd569-config-data\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p" Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 
Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.995202 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9cph\" (UniqueName: \"kubernetes.io/projected/25c34180-c904-4cfa-89d8-bf5f1d5320dd-kube-api-access-m9cph\") pod \"dnsmasq-dns-d94679d6f-b2bvf\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " pod="openstack/dnsmasq-dns-d94679d6f-b2bvf"
Nov 30 08:07:32 crc kubenswrapper[4941]: I1130 08:07:32.997052 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvf26\" (UniqueName: \"kubernetes.io/projected/a1602b04-2515-4374-aa8a-04802f9cd569-kube-api-access-tvf26\") pod \"barbican-keystone-listener-756cddb678-ld46p\" (UID: \"a1602b04-2515-4374-aa8a-04802f9cd569\") " pod="openstack/barbican-keystone-listener-756cddb678-ld46p"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.010573 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-756cddb678-ld46p"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.047133 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.078737 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-combined-ca-bundle\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.078812 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-config-data\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.078841 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-logs\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.079691 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-logs\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.080015 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9pdv\" (UniqueName: \"kubernetes.io/projected/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-kube-api-access-n9pdv\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.080102 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-config-data-custom\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.082857 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-config-data\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.085027 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-config-data-custom\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.088199 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-combined-ca-bundle\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.101043 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9pdv\" (UniqueName: \"kubernetes.io/projected/9093646c-4fff-4bb3-8b5e-c040a3da6cd7-kube-api-access-n9pdv\") pod \"barbican-api-548cc55d7d-csbl8\" (UID: \"9093646c-4fff-4bb3-8b5e-c040a3da6cd7\") " pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.132013 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.187557 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7f4878bc6c-wfqmk"
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.460349 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-756cddb678-ld46p"]
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.535840 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7f4878bc6c-wfqmk"]
Nov 30 08:07:33 crc kubenswrapper[4941]: W1130 08:07:33.542227 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5265e97f_b224_4441_99fe_716a0df577f6.slice/crio-46a67b145a080c43d4330762c67d4e5485297265afad44150004053c53547311 WatchSource:0}: Error finding container 46a67b145a080c43d4330762c67d4e5485297265afad44150004053c53547311: Status 404 returned error can't find the container with id 46a67b145a080c43d4330762c67d4e5485297265afad44150004053c53547311
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.608228 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d94679d6f-b2bvf"]
Nov 30 08:07:33 crc kubenswrapper[4941]: I1130 08:07:33.685732 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-548cc55d7d-csbl8"]
Nov 30 08:07:33 crc kubenswrapper[4941]: W1130 08:07:33.708529 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9093646c_4fff_4bb3_8b5e_c040a3da6cd7.slice/crio-97ff31f2bfa858ff2ee95218f4d9990076dea01564c2564bba7d0373c4c6bdf2 WatchSource:0}: Error finding container 97ff31f2bfa858ff2ee95218f4d9990076dea01564c2564bba7d0373c4c6bdf2: Status 404 returned error can't find the container with id 97ff31f2bfa858ff2ee95218f4d9990076dea01564c2564bba7d0373c4c6bdf2
Nov 30 08:07:34 crc kubenswrapper[4941]: I1130 08:07:34.187355 4941 generic.go:334] "Generic (PLEG): container finished" podID="25c34180-c904-4cfa-89d8-bf5f1d5320dd" containerID="bb43ee18f9aa04dc1128df687c25e5f5ba7e7b2538a321e18b2938bae4ec35f1" exitCode=0
Nov 30 08:07:34 crc kubenswrapper[4941]: I1130 08:07:34.187449 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" event={"ID":"25c34180-c904-4cfa-89d8-bf5f1d5320dd","Type":"ContainerDied","Data":"bb43ee18f9aa04dc1128df687c25e5f5ba7e7b2538a321e18b2938bae4ec35f1"}
Nov 30 08:07:34 crc kubenswrapper[4941]: I1130 08:07:34.187964 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" event={"ID":"25c34180-c904-4cfa-89d8-bf5f1d5320dd","Type":"ContainerStarted","Data":"cbb44a3f3b71c7da4822dde79eb5891d38c494c2f96895395688b01447f3595b"}
Nov 30 08:07:34 crc kubenswrapper[4941]: I1130 08:07:34.189597 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f4878bc6c-wfqmk" event={"ID":"5265e97f-b224-4441-99fe-716a0df577f6","Type":"ContainerStarted","Data":"46a67b145a080c43d4330762c67d4e5485297265afad44150004053c53547311"}
Nov 30 08:07:34 crc kubenswrapper[4941]: I1130 08:07:34.192685 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-756cddb678-ld46p" event={"ID":"a1602b04-2515-4374-aa8a-04802f9cd569","Type":"ContainerStarted","Data":"438ce919732e217231a3d109b8bf9591e88102545d63364c916e802dd67811fa"}
Nov 30 08:07:34 crc kubenswrapper[4941]: I1130 08:07:34.198824 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-548cc55d7d-csbl8" event={"ID":"9093646c-4fff-4bb3-8b5e-c040a3da6cd7","Type":"ContainerStarted","Data":"d224305824e89561ce2d4040c929ed38a02182734df33bb479f19a357eacf87f"}
Nov 30 08:07:34 crc kubenswrapper[4941]: I1130 08:07:34.198867 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-548cc55d7d-csbl8" event={"ID":"9093646c-4fff-4bb3-8b5e-c040a3da6cd7","Type":"ContainerStarted","Data":"21f2abd9de3dfd8ce9c8e33c547eacd55c160c68a93a03a437ef6da1d389e00c"}
Nov 30 08:07:34 crc kubenswrapper[4941]: I1130 08:07:34.198882 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-548cc55d7d-csbl8" event={"ID":"9093646c-4fff-4bb3-8b5e-c040a3da6cd7","Type":"ContainerStarted","Data":"97ff31f2bfa858ff2ee95218f4d9990076dea01564c2564bba7d0373c4c6bdf2"}
Nov 30 08:07:34 crc kubenswrapper[4941]: I1130 08:07:34.199071 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:34 crc kubenswrapper[4941]: I1130 08:07:34.199141 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-548cc55d7d-csbl8"
Nov 30 08:07:34 crc kubenswrapper[4941]: I1130 08:07:34.233833 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-548cc55d7d-csbl8" podStartSLOduration=2.233810057 podStartE2EDuration="2.233810057s" podCreationTimestamp="2025-11-30 08:07:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:07:34.227967456 +0000 UTC m=+4874.996139065" watchObservedRunningTime="2025-11-30 08:07:34.233810057 +0000 UTC m=+4875.001981666"
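The kubelet.go:2453 lines above are PLEG (pod lifecycle event generator) events carrying an {"ID","Type","Data"} tuple: the first ContainerStarted for each pod reports the sandbox ID (it matches the crio-... cgroup names in the manager.go:1169 watch-event warnings), later ones the app containers, and each ContainerDied pairs with a generic.go:334 "container finished" line carrying the exit code. A hedged Go mirror of that shape, with field names taken from the log text itself rather than from kubelet source:

package main

import (
	"encoding/json"
	"fmt"
)

// PodLifecycleEvent mirrors the event={"ID":...,"Type":...,"Data":...}
// payloads printed by the SyncLoop (PLEG) lines above.
type PodLifecycleEvent struct {
	ID   string // pod UID
	Type string // "ContainerStarted", "ContainerDied", ...
	Data string // container or sandbox ID
}

func main() {
	raw := `{"ID":"25c34180-c904-4cfa-89d8-bf5f1d5320dd","Type":"ContainerDied","Data":"bb43ee18f9aa04dc1128df687c25e5f5ba7e7b2538a321e18b2938bae4ec35f1"}`
	var ev PodLifecycleEvent
	if err := json.Unmarshal([]byte(raw), &ev); err != nil {
		panic(err)
	}
	fmt.Printf("pod %s: %s %.12s\n", ev.ID, ev.Type, ev.Data)
}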
"RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7" Nov 30 08:07:35 crc kubenswrapper[4941]: I1130 08:07:35.223377 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"b4e31e26d1d1b739fd99a437fefc2ed911d6c5ebbad46f7debdd4b9fe9b10aa8"} Nov 30 08:07:35 crc kubenswrapper[4941]: I1130 08:07:35.227218 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" event={"ID":"25c34180-c904-4cfa-89d8-bf5f1d5320dd","Type":"ContainerStarted","Data":"7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b"} Nov 30 08:07:35 crc kubenswrapper[4941]: I1130 08:07:35.227509 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:35 crc kubenswrapper[4941]: I1130 08:07:35.234962 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f4878bc6c-wfqmk" event={"ID":"5265e97f-b224-4441-99fe-716a0df577f6","Type":"ContainerStarted","Data":"13b3a91d1b6e091c75eedc83ad31d2a4e7f8c6ca10250ac71e2e4008fda4c134"} Nov 30 08:07:35 crc kubenswrapper[4941]: I1130 08:07:35.238223 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-756cddb678-ld46p" event={"ID":"a1602b04-2515-4374-aa8a-04802f9cd569","Type":"ContainerStarted","Data":"42f38ae92de6b18bc4edcce36631fa5484f6e87606b15f46fff0a2ca6afa9824"} Nov 30 08:07:35 crc kubenswrapper[4941]: I1130 08:07:35.280678 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" podStartSLOduration=3.280655377 podStartE2EDuration="3.280655377s" podCreationTimestamp="2025-11-30 08:07:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:07:35.273857666 +0000 UTC m=+4876.042029275" watchObservedRunningTime="2025-11-30 08:07:35.280655377 +0000 UTC m=+4876.048826986" Nov 30 08:07:36 crc kubenswrapper[4941]: I1130 08:07:36.253261 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-756cddb678-ld46p" event={"ID":"a1602b04-2515-4374-aa8a-04802f9cd569","Type":"ContainerStarted","Data":"a6828a2f43f760439a76150c0493d6ad0f4fda16c2e878b5e6bc8435fe7960c3"} Nov 30 08:07:36 crc kubenswrapper[4941]: I1130 08:07:36.260380 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f4878bc6c-wfqmk" event={"ID":"5265e97f-b224-4441-99fe-716a0df577f6","Type":"ContainerStarted","Data":"99626effefe5afca74770acd76737d0f8e013942fd14ceddec35366aad5cea71"} Nov 30 08:07:36 crc kubenswrapper[4941]: I1130 08:07:36.299584 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-756cddb678-ld46p" podStartSLOduration=3.016902755 podStartE2EDuration="4.299550093s" podCreationTimestamp="2025-11-30 08:07:32 +0000 UTC" firstStartedPulling="2025-11-30 08:07:33.467162945 +0000 UTC m=+4874.235334554" lastFinishedPulling="2025-11-30 08:07:34.749810273 +0000 UTC m=+4875.517981892" observedRunningTime="2025-11-30 08:07:36.283886839 +0000 UTC m=+4877.052058448" watchObservedRunningTime="2025-11-30 08:07:36.299550093 +0000 UTC m=+4877.067721742" Nov 30 08:07:36 crc kubenswrapper[4941]: I1130 08:07:36.319797 4941 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/barbican-worker-7f4878bc6c-wfqmk" podStartSLOduration=3.113415687 podStartE2EDuration="4.319763907s" podCreationTimestamp="2025-11-30 08:07:32 +0000 UTC" firstStartedPulling="2025-11-30 08:07:33.545127884 +0000 UTC m=+4874.313299493" lastFinishedPulling="2025-11-30 08:07:34.751476064 +0000 UTC m=+4875.519647713" observedRunningTime="2025-11-30 08:07:36.30498122 +0000 UTC m=+4877.073152829" watchObservedRunningTime="2025-11-30 08:07:36.319763907 +0000 UTC m=+4877.087935516" Nov 30 08:07:39 crc kubenswrapper[4941]: I1130 08:07:39.674443 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-548cc55d7d-csbl8" Nov 30 08:07:41 crc kubenswrapper[4941]: I1130 08:07:41.155354 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-548cc55d7d-csbl8" Nov 30 08:07:43 crc kubenswrapper[4941]: I1130 08:07:43.049601 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:07:43 crc kubenswrapper[4941]: I1130 08:07:43.141024 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dccc884f9-srsgd"] Nov 30 08:07:43 crc kubenswrapper[4941]: I1130 08:07:43.141326 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" podUID="1bafa4dd-edc7-43f6-85ac-646b83d95381" containerName="dnsmasq-dns" containerID="cri-o://dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6" gracePeriod=10 Nov 30 08:07:43 crc kubenswrapper[4941]: I1130 08:07:43.942978 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.012841 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-ovsdbserver-nb\") pod \"1bafa4dd-edc7-43f6-85ac-646b83d95381\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.012977 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsmbr\" (UniqueName: \"kubernetes.io/projected/1bafa4dd-edc7-43f6-85ac-646b83d95381-kube-api-access-tsmbr\") pod \"1bafa4dd-edc7-43f6-85ac-646b83d95381\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.013003 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-ovsdbserver-sb\") pod \"1bafa4dd-edc7-43f6-85ac-646b83d95381\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.013164 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-dns-svc\") pod \"1bafa4dd-edc7-43f6-85ac-646b83d95381\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.013199 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-config\") pod \"1bafa4dd-edc7-43f6-85ac-646b83d95381\" (UID: \"1bafa4dd-edc7-43f6-85ac-646b83d95381\") " Nov 30 08:07:44 crc 
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.020596 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bafa4dd-edc7-43f6-85ac-646b83d95381-kube-api-access-tsmbr" (OuterVolumeSpecName: "kube-api-access-tsmbr") pod "1bafa4dd-edc7-43f6-85ac-646b83d95381" (UID: "1bafa4dd-edc7-43f6-85ac-646b83d95381"). InnerVolumeSpecName "kube-api-access-tsmbr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.065859 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-config" (OuterVolumeSpecName: "config") pod "1bafa4dd-edc7-43f6-85ac-646b83d95381" (UID: "1bafa4dd-edc7-43f6-85ac-646b83d95381"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.078270 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1bafa4dd-edc7-43f6-85ac-646b83d95381" (UID: "1bafa4dd-edc7-43f6-85ac-646b83d95381"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.080017 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1bafa4dd-edc7-43f6-85ac-646b83d95381" (UID: "1bafa4dd-edc7-43f6-85ac-646b83d95381"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.096915 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1bafa4dd-edc7-43f6-85ac-646b83d95381" (UID: "1bafa4dd-edc7-43f6-85ac-646b83d95381"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.115589 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.115635 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.115646 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsmbr\" (UniqueName: \"kubernetes.io/projected/1bafa4dd-edc7-43f6-85ac-646b83d95381-kube-api-access-tsmbr\") on node \"crc\" DevicePath \"\""
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.115662 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-dns-svc\") on node \"crc\" DevicePath \"\""
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.115674 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bafa4dd-edc7-43f6-85ac-646b83d95381-config\") on node \"crc\" DevicePath \"\""
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.378154 4941 generic.go:334] "Generic (PLEG): container finished" podID="1bafa4dd-edc7-43f6-85ac-646b83d95381" containerID="dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6" exitCode=0
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.378404 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" event={"ID":"1bafa4dd-edc7-43f6-85ac-646b83d95381","Type":"ContainerDied","Data":"dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6"}
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.378921 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd" event={"ID":"1bafa4dd-edc7-43f6-85ac-646b83d95381","Type":"ContainerDied","Data":"b7793cd9f7d0c70ffc9e1ebc0ad68cf3df11c404c8b607b7fa4f2fe81122fb62"}
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.379005 4941 scope.go:117] "RemoveContainer" containerID="dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6"
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.378517 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dccc884f9-srsgd"
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.400267 4941 scope.go:117] "RemoveContainer" containerID="41edd3a5b7bc464b30e67106fa871e6c001f8dc8bc342f996011302904f4b47c"
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.446168 4941 scope.go:117] "RemoveContainer" containerID="dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6"
Nov 30 08:07:44 crc kubenswrapper[4941]: E1130 08:07:44.446807 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6\": container with ID starting with dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6 not found: ID does not exist" containerID="dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6"
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.446861 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6"} err="failed to get container status \"dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6\": rpc error: code = NotFound desc = could not find container \"dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6\": container with ID starting with dca54eda15e753f85c863e9479f0057b35fcaf2685bad79530cd525f3a3503c6 not found: ID does not exist"
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.446893 4941 scope.go:117] "RemoveContainer" containerID="41edd3a5b7bc464b30e67106fa871e6c001f8dc8bc342f996011302904f4b47c"
Nov 30 08:07:44 crc kubenswrapper[4941]: E1130 08:07:44.447260 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41edd3a5b7bc464b30e67106fa871e6c001f8dc8bc342f996011302904f4b47c\": container with ID starting with 41edd3a5b7bc464b30e67106fa871e6c001f8dc8bc342f996011302904f4b47c not found: ID does not exist" containerID="41edd3a5b7bc464b30e67106fa871e6c001f8dc8bc342f996011302904f4b47c"
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.447339 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41edd3a5b7bc464b30e67106fa871e6c001f8dc8bc342f996011302904f4b47c"} err="failed to get container status \"41edd3a5b7bc464b30e67106fa871e6c001f8dc8bc342f996011302904f4b47c\": rpc error: code = NotFound desc = could not find container \"41edd3a5b7bc464b30e67106fa871e6c001f8dc8bc342f996011302904f4b47c\": container with ID starting with 41edd3a5b7bc464b30e67106fa871e6c001f8dc8bc342f996011302904f4b47c not found: ID does not exist"
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.449382 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dccc884f9-srsgd"]
Nov 30 08:07:44 crc kubenswrapper[4941]: I1130 08:07:44.460693 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6dccc884f9-srsgd"]
Nov 30 08:07:45 crc kubenswrapper[4941]: I1130 08:07:45.539663 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bafa4dd-edc7-43f6-85ac-646b83d95381" path="/var/lib/kubelet/pods/1bafa4dd-edc7-43f6-85ac-646b83d95381/volumes"
Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.573380 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-szx5m"]
"RemoveStaleState: removing container" podUID="1bafa4dd-edc7-43f6-85ac-646b83d95381" containerName="init" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.574539 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bafa4dd-edc7-43f6-85ac-646b83d95381" containerName="init" Nov 30 08:07:54 crc kubenswrapper[4941]: E1130 08:07:54.574594 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bafa4dd-edc7-43f6-85ac-646b83d95381" containerName="dnsmasq-dns" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.574600 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bafa4dd-edc7-43f6-85ac-646b83d95381" containerName="dnsmasq-dns" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.574835 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bafa4dd-edc7-43f6-85ac-646b83d95381" containerName="dnsmasq-dns" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.575714 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-szx5m" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.591789 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-szx5m"] Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.666846 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46540d03-cc01-4368-9097-a0db9616b3a9-operator-scripts\") pod \"neutron-db-create-szx5m\" (UID: \"46540d03-cc01-4368-9097-a0db9616b3a9\") " pod="openstack/neutron-db-create-szx5m" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.667349 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sfdj\" (UniqueName: \"kubernetes.io/projected/46540d03-cc01-4368-9097-a0db9616b3a9-kube-api-access-2sfdj\") pod \"neutron-db-create-szx5m\" (UID: \"46540d03-cc01-4368-9097-a0db9616b3a9\") " pod="openstack/neutron-db-create-szx5m" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.681778 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-8389-account-create-update-m7724"] Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.682914 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8389-account-create-update-m7724" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.701264 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8389-account-create-update-m7724"] Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.705884 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.770084 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3232a4a-5afb-4b43-aa1d-80d03640444e-operator-scripts\") pod \"neutron-8389-account-create-update-m7724\" (UID: \"a3232a4a-5afb-4b43-aa1d-80d03640444e\") " pod="openstack/neutron-8389-account-create-update-m7724" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.770160 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46540d03-cc01-4368-9097-a0db9616b3a9-operator-scripts\") pod \"neutron-db-create-szx5m\" (UID: \"46540d03-cc01-4368-9097-a0db9616b3a9\") " pod="openstack/neutron-db-create-szx5m" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.770256 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2hjz\" (UniqueName: \"kubernetes.io/projected/a3232a4a-5afb-4b43-aa1d-80d03640444e-kube-api-access-n2hjz\") pod \"neutron-8389-account-create-update-m7724\" (UID: \"a3232a4a-5afb-4b43-aa1d-80d03640444e\") " pod="openstack/neutron-8389-account-create-update-m7724" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.770286 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sfdj\" (UniqueName: \"kubernetes.io/projected/46540d03-cc01-4368-9097-a0db9616b3a9-kube-api-access-2sfdj\") pod \"neutron-db-create-szx5m\" (UID: \"46540d03-cc01-4368-9097-a0db9616b3a9\") " pod="openstack/neutron-db-create-szx5m" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.772014 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46540d03-cc01-4368-9097-a0db9616b3a9-operator-scripts\") pod \"neutron-db-create-szx5m\" (UID: \"46540d03-cc01-4368-9097-a0db9616b3a9\") " pod="openstack/neutron-db-create-szx5m" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.793918 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sfdj\" (UniqueName: \"kubernetes.io/projected/46540d03-cc01-4368-9097-a0db9616b3a9-kube-api-access-2sfdj\") pod \"neutron-db-create-szx5m\" (UID: \"46540d03-cc01-4368-9097-a0db9616b3a9\") " pod="openstack/neutron-db-create-szx5m" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.872641 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2hjz\" (UniqueName: \"kubernetes.io/projected/a3232a4a-5afb-4b43-aa1d-80d03640444e-kube-api-access-n2hjz\") pod \"neutron-8389-account-create-update-m7724\" (UID: \"a3232a4a-5afb-4b43-aa1d-80d03640444e\") " pod="openstack/neutron-8389-account-create-update-m7724" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.873082 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3232a4a-5afb-4b43-aa1d-80d03640444e-operator-scripts\") pod 
\"neutron-8389-account-create-update-m7724\" (UID: \"a3232a4a-5afb-4b43-aa1d-80d03640444e\") " pod="openstack/neutron-8389-account-create-update-m7724" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.874039 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3232a4a-5afb-4b43-aa1d-80d03640444e-operator-scripts\") pod \"neutron-8389-account-create-update-m7724\" (UID: \"a3232a4a-5afb-4b43-aa1d-80d03640444e\") " pod="openstack/neutron-8389-account-create-update-m7724" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.911080 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-szx5m" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.911136 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2hjz\" (UniqueName: \"kubernetes.io/projected/a3232a4a-5afb-4b43-aa1d-80d03640444e-kube-api-access-n2hjz\") pod \"neutron-8389-account-create-update-m7724\" (UID: \"a3232a4a-5afb-4b43-aa1d-80d03640444e\") " pod="openstack/neutron-8389-account-create-update-m7724" Nov 30 08:07:54 crc kubenswrapper[4941]: I1130 08:07:54.998780 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8389-account-create-update-m7724" Nov 30 08:07:55 crc kubenswrapper[4941]: W1130 08:07:55.402285 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46540d03_cc01_4368_9097_a0db9616b3a9.slice/crio-d991510a5a44af46f4a12a6dd42a8f9118c33637d18e0e7eda8f669f8fac559b WatchSource:0}: Error finding container d991510a5a44af46f4a12a6dd42a8f9118c33637d18e0e7eda8f669f8fac559b: Status 404 returned error can't find the container with id d991510a5a44af46f4a12a6dd42a8f9118c33637d18e0e7eda8f669f8fac559b Nov 30 08:07:55 crc kubenswrapper[4941]: I1130 08:07:55.405260 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-szx5m"] Nov 30 08:07:55 crc kubenswrapper[4941]: W1130 08:07:55.535083 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3232a4a_5afb_4b43_aa1d_80d03640444e.slice/crio-e27e98c0a14c2fd6eb33b47a8c55f17bfd7f9d6b2431df7c974176e5165395b8 WatchSource:0}: Error finding container e27e98c0a14c2fd6eb33b47a8c55f17bfd7f9d6b2431df7c974176e5165395b8: Status 404 returned error can't find the container with id e27e98c0a14c2fd6eb33b47a8c55f17bfd7f9d6b2431df7c974176e5165395b8 Nov 30 08:07:55 crc kubenswrapper[4941]: I1130 08:07:55.550018 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8389-account-create-update-m7724"] Nov 30 08:07:55 crc kubenswrapper[4941]: I1130 08:07:55.829928 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-szx5m" event={"ID":"46540d03-cc01-4368-9097-a0db9616b3a9","Type":"ContainerStarted","Data":"b21747adecca951148c0b6fdb98d07314f18f3930075899b1d6df1c3a8bb2677"} Nov 30 08:07:55 crc kubenswrapper[4941]: I1130 08:07:55.830375 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-szx5m" event={"ID":"46540d03-cc01-4368-9097-a0db9616b3a9","Type":"ContainerStarted","Data":"d991510a5a44af46f4a12a6dd42a8f9118c33637d18e0e7eda8f669f8fac559b"} Nov 30 08:07:55 crc kubenswrapper[4941]: I1130 08:07:55.831409 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-8389-account-create-update-m7724" event={"ID":"a3232a4a-5afb-4b43-aa1d-80d03640444e","Type":"ContainerStarted","Data":"d1461aa04a4331e5ef7c4b92e13b15edc48d62ad8f50f72983515624c9dedecc"} Nov 30 08:07:55 crc kubenswrapper[4941]: I1130 08:07:55.831469 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8389-account-create-update-m7724" event={"ID":"a3232a4a-5afb-4b43-aa1d-80d03640444e","Type":"ContainerStarted","Data":"e27e98c0a14c2fd6eb33b47a8c55f17bfd7f9d6b2431df7c974176e5165395b8"} Nov 30 08:07:55 crc kubenswrapper[4941]: I1130 08:07:55.863090 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-szx5m" podStartSLOduration=1.863053346 podStartE2EDuration="1.863053346s" podCreationTimestamp="2025-11-30 08:07:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:07:55.850786607 +0000 UTC m=+4896.618958256" watchObservedRunningTime="2025-11-30 08:07:55.863053346 +0000 UTC m=+4896.631224995" Nov 30 08:07:55 crc kubenswrapper[4941]: I1130 08:07:55.881015 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-8389-account-create-update-m7724" podStartSLOduration=1.88097497 podStartE2EDuration="1.88097497s" podCreationTimestamp="2025-11-30 08:07:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:07:55.875523261 +0000 UTC m=+4896.643694900" watchObservedRunningTime="2025-11-30 08:07:55.88097497 +0000 UTC m=+4896.649146619" Nov 30 08:07:56 crc kubenswrapper[4941]: I1130 08:07:56.847396 4941 generic.go:334] "Generic (PLEG): container finished" podID="46540d03-cc01-4368-9097-a0db9616b3a9" containerID="b21747adecca951148c0b6fdb98d07314f18f3930075899b1d6df1c3a8bb2677" exitCode=0 Nov 30 08:07:56 crc kubenswrapper[4941]: I1130 08:07:56.847494 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-szx5m" event={"ID":"46540d03-cc01-4368-9097-a0db9616b3a9","Type":"ContainerDied","Data":"b21747adecca951148c0b6fdb98d07314f18f3930075899b1d6df1c3a8bb2677"} Nov 30 08:07:56 crc kubenswrapper[4941]: I1130 08:07:56.851182 4941 generic.go:334] "Generic (PLEG): container finished" podID="a3232a4a-5afb-4b43-aa1d-80d03640444e" containerID="d1461aa04a4331e5ef7c4b92e13b15edc48d62ad8f50f72983515624c9dedecc" exitCode=0 Nov 30 08:07:56 crc kubenswrapper[4941]: I1130 08:07:56.851220 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8389-account-create-update-m7724" event={"ID":"a3232a4a-5afb-4b43-aa1d-80d03640444e","Type":"ContainerDied","Data":"d1461aa04a4331e5ef7c4b92e13b15edc48d62ad8f50f72983515624c9dedecc"} Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.776103 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-szx5m" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.781635 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8389-account-create-update-m7724" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.874670 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sfdj\" (UniqueName: \"kubernetes.io/projected/46540d03-cc01-4368-9097-a0db9616b3a9-kube-api-access-2sfdj\") pod \"46540d03-cc01-4368-9097-a0db9616b3a9\" (UID: \"46540d03-cc01-4368-9097-a0db9616b3a9\") " Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.874874 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2hjz\" (UniqueName: \"kubernetes.io/projected/a3232a4a-5afb-4b43-aa1d-80d03640444e-kube-api-access-n2hjz\") pod \"a3232a4a-5afb-4b43-aa1d-80d03640444e\" (UID: \"a3232a4a-5afb-4b43-aa1d-80d03640444e\") " Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.876301 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3232a4a-5afb-4b43-aa1d-80d03640444e-operator-scripts\") pod \"a3232a4a-5afb-4b43-aa1d-80d03640444e\" (UID: \"a3232a4a-5afb-4b43-aa1d-80d03640444e\") " Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.876373 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46540d03-cc01-4368-9097-a0db9616b3a9-operator-scripts\") pod \"46540d03-cc01-4368-9097-a0db9616b3a9\" (UID: \"46540d03-cc01-4368-9097-a0db9616b3a9\") " Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.878268 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3232a4a-5afb-4b43-aa1d-80d03640444e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a3232a4a-5afb-4b43-aa1d-80d03640444e" (UID: "a3232a4a-5afb-4b43-aa1d-80d03640444e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.878458 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46540d03-cc01-4368-9097-a0db9616b3a9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "46540d03-cc01-4368-9097-a0db9616b3a9" (UID: "46540d03-cc01-4368-9097-a0db9616b3a9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.880713 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-szx5m" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.880726 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-szx5m" event={"ID":"46540d03-cc01-4368-9097-a0db9616b3a9","Type":"ContainerDied","Data":"d991510a5a44af46f4a12a6dd42a8f9118c33637d18e0e7eda8f669f8fac559b"} Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.880766 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d991510a5a44af46f4a12a6dd42a8f9118c33637d18e0e7eda8f669f8fac559b" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.886108 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8389-account-create-update-m7724" event={"ID":"a3232a4a-5afb-4b43-aa1d-80d03640444e","Type":"ContainerDied","Data":"e27e98c0a14c2fd6eb33b47a8c55f17bfd7f9d6b2431df7c974176e5165395b8"} Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.886165 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e27e98c0a14c2fd6eb33b47a8c55f17bfd7f9d6b2431df7c974176e5165395b8" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.886215 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8389-account-create-update-m7724" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.889501 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46540d03-cc01-4368-9097-a0db9616b3a9-kube-api-access-2sfdj" (OuterVolumeSpecName: "kube-api-access-2sfdj") pod "46540d03-cc01-4368-9097-a0db9616b3a9" (UID: "46540d03-cc01-4368-9097-a0db9616b3a9"). InnerVolumeSpecName "kube-api-access-2sfdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.908847 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3232a4a-5afb-4b43-aa1d-80d03640444e-kube-api-access-n2hjz" (OuterVolumeSpecName: "kube-api-access-n2hjz") pod "a3232a4a-5afb-4b43-aa1d-80d03640444e" (UID: "a3232a4a-5afb-4b43-aa1d-80d03640444e"). InnerVolumeSpecName "kube-api-access-n2hjz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.979956 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3232a4a-5afb-4b43-aa1d-80d03640444e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.979993 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46540d03-cc01-4368-9097-a0db9616b3a9-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.980003 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sfdj\" (UniqueName: \"kubernetes.io/projected/46540d03-cc01-4368-9097-a0db9616b3a9-kube-api-access-2sfdj\") on node \"crc\" DevicePath \"\"" Nov 30 08:07:58 crc kubenswrapper[4941]: I1130 08:07:58.980013 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2hjz\" (UniqueName: \"kubernetes.io/projected/a3232a4a-5afb-4b43-aa1d-80d03640444e-kube-api-access-n2hjz\") on node \"crc\" DevicePath \"\"" Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.892419 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-bwln7"] Nov 30 08:07:59 crc kubenswrapper[4941]: E1130 08:07:59.892882 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3232a4a-5afb-4b43-aa1d-80d03640444e" containerName="mariadb-account-create-update" Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.892899 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3232a4a-5afb-4b43-aa1d-80d03640444e" containerName="mariadb-account-create-update" Nov 30 08:07:59 crc kubenswrapper[4941]: E1130 08:07:59.892929 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46540d03-cc01-4368-9097-a0db9616b3a9" containerName="mariadb-database-create" Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.892938 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="46540d03-cc01-4368-9097-a0db9616b3a9" containerName="mariadb-database-create" Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.893378 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="46540d03-cc01-4368-9097-a0db9616b3a9" containerName="mariadb-database-create" Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.893402 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3232a4a-5afb-4b43-aa1d-80d03640444e" containerName="mariadb-account-create-update" Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.894369 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-bwln7" Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.897362 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.899905 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-pj82d" Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.905063 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.908759 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-bwln7"] Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.999047 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6tpr\" (UniqueName: \"kubernetes.io/projected/8c74578e-3a37-4eb5-992a-5378b4d10be1-kube-api-access-t6tpr\") pod \"neutron-db-sync-bwln7\" (UID: \"8c74578e-3a37-4eb5-992a-5378b4d10be1\") " pod="openstack/neutron-db-sync-bwln7" Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.999434 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c74578e-3a37-4eb5-992a-5378b4d10be1-combined-ca-bundle\") pod \"neutron-db-sync-bwln7\" (UID: \"8c74578e-3a37-4eb5-992a-5378b4d10be1\") " pod="openstack/neutron-db-sync-bwln7" Nov 30 08:07:59 crc kubenswrapper[4941]: I1130 08:07:59.999591 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8c74578e-3a37-4eb5-992a-5378b4d10be1-config\") pod \"neutron-db-sync-bwln7\" (UID: \"8c74578e-3a37-4eb5-992a-5378b4d10be1\") " pod="openstack/neutron-db-sync-bwln7" Nov 30 08:08:00 crc kubenswrapper[4941]: I1130 08:08:00.107209 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6tpr\" (UniqueName: \"kubernetes.io/projected/8c74578e-3a37-4eb5-992a-5378b4d10be1-kube-api-access-t6tpr\") pod \"neutron-db-sync-bwln7\" (UID: \"8c74578e-3a37-4eb5-992a-5378b4d10be1\") " pod="openstack/neutron-db-sync-bwln7" Nov 30 08:08:00 crc kubenswrapper[4941]: I1130 08:08:00.107319 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c74578e-3a37-4eb5-992a-5378b4d10be1-combined-ca-bundle\") pod \"neutron-db-sync-bwln7\" (UID: \"8c74578e-3a37-4eb5-992a-5378b4d10be1\") " pod="openstack/neutron-db-sync-bwln7" Nov 30 08:08:00 crc kubenswrapper[4941]: I1130 08:08:00.107455 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8c74578e-3a37-4eb5-992a-5378b4d10be1-config\") pod \"neutron-db-sync-bwln7\" (UID: \"8c74578e-3a37-4eb5-992a-5378b4d10be1\") " pod="openstack/neutron-db-sync-bwln7" Nov 30 08:08:00 crc kubenswrapper[4941]: I1130 08:08:00.116057 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c74578e-3a37-4eb5-992a-5378b4d10be1-combined-ca-bundle\") pod \"neutron-db-sync-bwln7\" (UID: \"8c74578e-3a37-4eb5-992a-5378b4d10be1\") " pod="openstack/neutron-db-sync-bwln7" Nov 30 08:08:00 crc kubenswrapper[4941]: I1130 08:08:00.119200 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/secret/8c74578e-3a37-4eb5-992a-5378b4d10be1-config\") pod \"neutron-db-sync-bwln7\" (UID: \"8c74578e-3a37-4eb5-992a-5378b4d10be1\") " pod="openstack/neutron-db-sync-bwln7" Nov 30 08:08:00 crc kubenswrapper[4941]: I1130 08:08:00.132210 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6tpr\" (UniqueName: \"kubernetes.io/projected/8c74578e-3a37-4eb5-992a-5378b4d10be1-kube-api-access-t6tpr\") pod \"neutron-db-sync-bwln7\" (UID: \"8c74578e-3a37-4eb5-992a-5378b4d10be1\") " pod="openstack/neutron-db-sync-bwln7" Nov 30 08:08:00 crc kubenswrapper[4941]: I1130 08:08:00.212760 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-bwln7" Nov 30 08:08:00 crc kubenswrapper[4941]: I1130 08:08:00.678180 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-bwln7"] Nov 30 08:08:00 crc kubenswrapper[4941]: I1130 08:08:00.909097 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bwln7" event={"ID":"8c74578e-3a37-4eb5-992a-5378b4d10be1","Type":"ContainerStarted","Data":"9cb6dcbf3579ad62c8f96d3e4d645924736916f83531007da2e29f004b516fe1"} Nov 30 08:08:01 crc kubenswrapper[4941]: I1130 08:08:01.920666 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bwln7" event={"ID":"8c74578e-3a37-4eb5-992a-5378b4d10be1","Type":"ContainerStarted","Data":"de008258045eea3358e7c0869723b3d8d17871af9c03ebc72d0681a9bbfabdac"} Nov 30 08:08:01 crc kubenswrapper[4941]: I1130 08:08:01.953976 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-bwln7" podStartSLOduration=2.953942537 podStartE2EDuration="2.953942537s" podCreationTimestamp="2025-11-30 08:07:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:08:01.94822534 +0000 UTC m=+4902.716396949" watchObservedRunningTime="2025-11-30 08:08:01.953942537 +0000 UTC m=+4902.722114176" Nov 30 08:08:05 crc kubenswrapper[4941]: I1130 08:08:05.957657 4941 generic.go:334] "Generic (PLEG): container finished" podID="8c74578e-3a37-4eb5-992a-5378b4d10be1" containerID="de008258045eea3358e7c0869723b3d8d17871af9c03ebc72d0681a9bbfabdac" exitCode=0 Nov 30 08:08:05 crc kubenswrapper[4941]: I1130 08:08:05.957746 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bwln7" event={"ID":"8c74578e-3a37-4eb5-992a-5378b4d10be1","Type":"ContainerDied","Data":"de008258045eea3358e7c0869723b3d8d17871af9c03ebc72d0681a9bbfabdac"} Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.309149 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-bwln7" Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.476310 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c74578e-3a37-4eb5-992a-5378b4d10be1-combined-ca-bundle\") pod \"8c74578e-3a37-4eb5-992a-5378b4d10be1\" (UID: \"8c74578e-3a37-4eb5-992a-5378b4d10be1\") " Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.476682 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8c74578e-3a37-4eb5-992a-5378b4d10be1-config\") pod \"8c74578e-3a37-4eb5-992a-5378b4d10be1\" (UID: \"8c74578e-3a37-4eb5-992a-5378b4d10be1\") " Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.476753 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6tpr\" (UniqueName: \"kubernetes.io/projected/8c74578e-3a37-4eb5-992a-5378b4d10be1-kube-api-access-t6tpr\") pod \"8c74578e-3a37-4eb5-992a-5378b4d10be1\" (UID: \"8c74578e-3a37-4eb5-992a-5378b4d10be1\") " Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.485988 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c74578e-3a37-4eb5-992a-5378b4d10be1-kube-api-access-t6tpr" (OuterVolumeSpecName: "kube-api-access-t6tpr") pod "8c74578e-3a37-4eb5-992a-5378b4d10be1" (UID: "8c74578e-3a37-4eb5-992a-5378b4d10be1"). InnerVolumeSpecName "kube-api-access-t6tpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.505961 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c74578e-3a37-4eb5-992a-5378b4d10be1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c74578e-3a37-4eb5-992a-5378b4d10be1" (UID: "8c74578e-3a37-4eb5-992a-5378b4d10be1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.534454 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c74578e-3a37-4eb5-992a-5378b4d10be1-config" (OuterVolumeSpecName: "config") pod "8c74578e-3a37-4eb5-992a-5378b4d10be1" (UID: "8c74578e-3a37-4eb5-992a-5378b4d10be1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.579703 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/8c74578e-3a37-4eb5-992a-5378b4d10be1-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.579754 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6tpr\" (UniqueName: \"kubernetes.io/projected/8c74578e-3a37-4eb5-992a-5378b4d10be1-kube-api-access-t6tpr\") on node \"crc\" DevicePath \"\"" Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.579772 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c74578e-3a37-4eb5-992a-5378b4d10be1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.984078 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bwln7" event={"ID":"8c74578e-3a37-4eb5-992a-5378b4d10be1","Type":"ContainerDied","Data":"9cb6dcbf3579ad62c8f96d3e4d645924736916f83531007da2e29f004b516fe1"} Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.984632 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cb6dcbf3579ad62c8f96d3e4d645924736916f83531007da2e29f004b516fe1" Nov 30 08:08:07 crc kubenswrapper[4941]: I1130 08:08:07.984193 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-bwln7" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.174168 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b9957f557-pv64j"] Nov 30 08:08:08 crc kubenswrapper[4941]: E1130 08:08:08.174700 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74578e-3a37-4eb5-992a-5378b4d10be1" containerName="neutron-db-sync" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.174734 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74578e-3a37-4eb5-992a-5378b4d10be1" containerName="neutron-db-sync" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.174931 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74578e-3a37-4eb5-992a-5378b4d10be1" containerName="neutron-db-sync" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.176175 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.191803 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b9957f557-pv64j"] Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.291824 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g78hr\" (UniqueName: \"kubernetes.io/projected/f466e9bf-9be7-4555-8667-a8b710462f04-kube-api-access-g78hr\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.291903 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-ovsdbserver-nb\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.291965 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-dns-svc\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.292001 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-config\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.292039 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-ovsdbserver-sb\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.376718 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-68c887db59-hwrwh"] Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.378266 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.381137 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.381256 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.381184 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-pj82d" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.394099 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-config\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.394400 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-ovsdbserver-sb\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.394550 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g78hr\" (UniqueName: \"kubernetes.io/projected/f466e9bf-9be7-4555-8667-a8b710462f04-kube-api-access-g78hr\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.394656 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-ovsdbserver-nb\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.394772 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-dns-svc\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.395494 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-ovsdbserver-sb\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.395499 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-config\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.395817 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-ovsdbserver-nb\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: 
\"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.396081 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-dns-svc\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.400284 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-68c887db59-hwrwh"] Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.420056 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g78hr\" (UniqueName: \"kubernetes.io/projected/f466e9bf-9be7-4555-8667-a8b710462f04-kube-api-access-g78hr\") pod \"dnsmasq-dns-b9957f557-pv64j\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.496245 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.496865 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgxlm\" (UniqueName: \"kubernetes.io/projected/240bba4d-ff88-447f-8879-045591768175-kube-api-access-xgxlm\") pod \"neutron-68c887db59-hwrwh\" (UID: \"240bba4d-ff88-447f-8879-045591768175\") " pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.496966 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/240bba4d-ff88-447f-8879-045591768175-config\") pod \"neutron-68c887db59-hwrwh\" (UID: \"240bba4d-ff88-447f-8879-045591768175\") " pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.496991 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/240bba4d-ff88-447f-8879-045591768175-httpd-config\") pod \"neutron-68c887db59-hwrwh\" (UID: \"240bba4d-ff88-447f-8879-045591768175\") " pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.497014 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/240bba4d-ff88-447f-8879-045591768175-combined-ca-bundle\") pod \"neutron-68c887db59-hwrwh\" (UID: \"240bba4d-ff88-447f-8879-045591768175\") " pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.601383 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/240bba4d-ff88-447f-8879-045591768175-config\") pod \"neutron-68c887db59-hwrwh\" (UID: \"240bba4d-ff88-447f-8879-045591768175\") " pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.601840 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/240bba4d-ff88-447f-8879-045591768175-httpd-config\") pod \"neutron-68c887db59-hwrwh\" (UID: \"240bba4d-ff88-447f-8879-045591768175\") " pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc 
kubenswrapper[4941]: I1130 08:08:08.601888 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/240bba4d-ff88-447f-8879-045591768175-combined-ca-bundle\") pod \"neutron-68c887db59-hwrwh\" (UID: \"240bba4d-ff88-447f-8879-045591768175\") " pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.602154 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgxlm\" (UniqueName: \"kubernetes.io/projected/240bba4d-ff88-447f-8879-045591768175-kube-api-access-xgxlm\") pod \"neutron-68c887db59-hwrwh\" (UID: \"240bba4d-ff88-447f-8879-045591768175\") " pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.609532 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/240bba4d-ff88-447f-8879-045591768175-httpd-config\") pod \"neutron-68c887db59-hwrwh\" (UID: \"240bba4d-ff88-447f-8879-045591768175\") " pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.610052 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/240bba4d-ff88-447f-8879-045591768175-config\") pod \"neutron-68c887db59-hwrwh\" (UID: \"240bba4d-ff88-447f-8879-045591768175\") " pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.620823 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/240bba4d-ff88-447f-8879-045591768175-combined-ca-bundle\") pod \"neutron-68c887db59-hwrwh\" (UID: \"240bba4d-ff88-447f-8879-045591768175\") " pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.638201 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgxlm\" (UniqueName: \"kubernetes.io/projected/240bba4d-ff88-447f-8879-045591768175-kube-api-access-xgxlm\") pod \"neutron-68c887db59-hwrwh\" (UID: \"240bba4d-ff88-447f-8879-045591768175\") " pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:08 crc kubenswrapper[4941]: I1130 08:08:08.704165 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:09 crc kubenswrapper[4941]: I1130 08:08:09.043628 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b9957f557-pv64j"] Nov 30 08:08:09 crc kubenswrapper[4941]: I1130 08:08:09.152470 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-68c887db59-hwrwh"] Nov 30 08:08:10 crc kubenswrapper[4941]: I1130 08:08:10.026601 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-68c887db59-hwrwh" event={"ID":"240bba4d-ff88-447f-8879-045591768175","Type":"ContainerStarted","Data":"736c2971b9657255682102beea4caa7a21562ae0c234804ece8667ab68108ea9"} Nov 30 08:08:10 crc kubenswrapper[4941]: I1130 08:08:10.029405 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b9957f557-pv64j" event={"ID":"f466e9bf-9be7-4555-8667-a8b710462f04","Type":"ContainerStarted","Data":"4a09d38985ce462d9ff39b8ad2118ce24c5331772211fb8b8f947c6a2eafbbe5"} Nov 30 08:08:11 crc kubenswrapper[4941]: I1130 08:08:11.045587 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-68c887db59-hwrwh" event={"ID":"240bba4d-ff88-447f-8879-045591768175","Type":"ContainerStarted","Data":"6045def9e47be71af21e3ae067eb85eb47d9999b79cf1894b64448e9ee54cccf"} Nov 30 08:08:11 crc kubenswrapper[4941]: I1130 08:08:11.046267 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:11 crc kubenswrapper[4941]: I1130 08:08:11.046278 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-68c887db59-hwrwh" event={"ID":"240bba4d-ff88-447f-8879-045591768175","Type":"ContainerStarted","Data":"d338dc7e34f5b55daa5408be655fe25d828e435097a35a9a5a19ab16f0b9dc4b"} Nov 30 08:08:11 crc kubenswrapper[4941]: I1130 08:08:11.050676 4941 generic.go:334] "Generic (PLEG): container finished" podID="f466e9bf-9be7-4555-8667-a8b710462f04" containerID="683cb4b402561dcdbc89c75518d8339e7359e3d7f79231bfa894ea728232953c" exitCode=0 Nov 30 08:08:11 crc kubenswrapper[4941]: I1130 08:08:11.050750 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b9957f557-pv64j" event={"ID":"f466e9bf-9be7-4555-8667-a8b710462f04","Type":"ContainerDied","Data":"683cb4b402561dcdbc89c75518d8339e7359e3d7f79231bfa894ea728232953c"} Nov 30 08:08:11 crc kubenswrapper[4941]: I1130 08:08:11.081108 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-68c887db59-hwrwh" podStartSLOduration=3.081079455 podStartE2EDuration="3.081079455s" podCreationTimestamp="2025-11-30 08:08:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:08:11.065852635 +0000 UTC m=+4911.834024264" watchObservedRunningTime="2025-11-30 08:08:11.081079455 +0000 UTC m=+4911.849251064" Nov 30 08:08:12 crc kubenswrapper[4941]: I1130 08:08:12.074604 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b9957f557-pv64j" event={"ID":"f466e9bf-9be7-4555-8667-a8b710462f04","Type":"ContainerStarted","Data":"bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba"} Nov 30 08:08:12 crc kubenswrapper[4941]: I1130 08:08:12.075584 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:12 crc kubenswrapper[4941]: I1130 08:08:12.105776 4941 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/dnsmasq-dns-b9957f557-pv64j" podStartSLOduration=4.1057522 podStartE2EDuration="4.1057522s" podCreationTimestamp="2025-11-30 08:08:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:08:12.105646437 +0000 UTC m=+4912.873818046" watchObservedRunningTime="2025-11-30 08:08:12.1057522 +0000 UTC m=+4912.873923809" Nov 30 08:08:18 crc kubenswrapper[4941]: I1130 08:08:18.498718 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:08:18 crc kubenswrapper[4941]: I1130 08:08:18.580918 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d94679d6f-b2bvf"] Nov 30 08:08:18 crc kubenswrapper[4941]: I1130 08:08:18.581243 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" podUID="25c34180-c904-4cfa-89d8-bf5f1d5320dd" containerName="dnsmasq-dns" containerID="cri-o://7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b" gracePeriod=10 Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.135900 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.160892 4941 generic.go:334] "Generic (PLEG): container finished" podID="25c34180-c904-4cfa-89d8-bf5f1d5320dd" containerID="7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b" exitCode=0 Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.160952 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" event={"ID":"25c34180-c904-4cfa-89d8-bf5f1d5320dd","Type":"ContainerDied","Data":"7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b"} Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.160988 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" event={"ID":"25c34180-c904-4cfa-89d8-bf5f1d5320dd","Type":"ContainerDied","Data":"cbb44a3f3b71c7da4822dde79eb5891d38c494c2f96895395688b01447f3595b"} Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.161009 4941 scope.go:117] "RemoveContainer" containerID="7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.161178 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d94679d6f-b2bvf" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.194246 4941 scope.go:117] "RemoveContainer" containerID="bb43ee18f9aa04dc1128df687c25e5f5ba7e7b2538a321e18b2938bae4ec35f1" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.233993 4941 scope.go:117] "RemoveContainer" containerID="7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b" Nov 30 08:08:19 crc kubenswrapper[4941]: E1130 08:08:19.234474 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b\": container with ID starting with 7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b not found: ID does not exist" containerID="7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.234549 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b"} err="failed to get container status \"7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b\": rpc error: code = NotFound desc = could not find container \"7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b\": container with ID starting with 7ec6f2c0773e0c3da97741a63c849942cfbc057470dc712f1cffa7cc09641b1b not found: ID does not exist" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.234586 4941 scope.go:117] "RemoveContainer" containerID="bb43ee18f9aa04dc1128df687c25e5f5ba7e7b2538a321e18b2938bae4ec35f1" Nov 30 08:08:19 crc kubenswrapper[4941]: E1130 08:08:19.234973 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb43ee18f9aa04dc1128df687c25e5f5ba7e7b2538a321e18b2938bae4ec35f1\": container with ID starting with bb43ee18f9aa04dc1128df687c25e5f5ba7e7b2538a321e18b2938bae4ec35f1 not found: ID does not exist" containerID="bb43ee18f9aa04dc1128df687c25e5f5ba7e7b2538a321e18b2938bae4ec35f1" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.235019 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb43ee18f9aa04dc1128df687c25e5f5ba7e7b2538a321e18b2938bae4ec35f1"} err="failed to get container status \"bb43ee18f9aa04dc1128df687c25e5f5ba7e7b2538a321e18b2938bae4ec35f1\": rpc error: code = NotFound desc = could not find container \"bb43ee18f9aa04dc1128df687c25e5f5ba7e7b2538a321e18b2938bae4ec35f1\": container with ID starting with bb43ee18f9aa04dc1128df687c25e5f5ba7e7b2538a321e18b2938bae4ec35f1 not found: ID does not exist" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.264778 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-ovsdbserver-sb\") pod \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.264998 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-config\") pod \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.265086 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"kube-api-access-m9cph\" (UniqueName: \"kubernetes.io/projected/25c34180-c904-4cfa-89d8-bf5f1d5320dd-kube-api-access-m9cph\") pod \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.265121 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-ovsdbserver-nb\") pod \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.265197 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-dns-svc\") pod \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\" (UID: \"25c34180-c904-4cfa-89d8-bf5f1d5320dd\") " Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.274721 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25c34180-c904-4cfa-89d8-bf5f1d5320dd-kube-api-access-m9cph" (OuterVolumeSpecName: "kube-api-access-m9cph") pod "25c34180-c904-4cfa-89d8-bf5f1d5320dd" (UID: "25c34180-c904-4cfa-89d8-bf5f1d5320dd"). InnerVolumeSpecName "kube-api-access-m9cph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.322632 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "25c34180-c904-4cfa-89d8-bf5f1d5320dd" (UID: "25c34180-c904-4cfa-89d8-bf5f1d5320dd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.324008 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-config" (OuterVolumeSpecName: "config") pod "25c34180-c904-4cfa-89d8-bf5f1d5320dd" (UID: "25c34180-c904-4cfa-89d8-bf5f1d5320dd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.331091 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "25c34180-c904-4cfa-89d8-bf5f1d5320dd" (UID: "25c34180-c904-4cfa-89d8-bf5f1d5320dd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.331291 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "25c34180-c904-4cfa-89d8-bf5f1d5320dd" (UID: "25c34180-c904-4cfa-89d8-bf5f1d5320dd"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.367825 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.367862 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9cph\" (UniqueName: \"kubernetes.io/projected/25c34180-c904-4cfa-89d8-bf5f1d5320dd-kube-api-access-m9cph\") on node \"crc\" DevicePath \"\"" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.367877 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.367899 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.367912 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25c34180-c904-4cfa-89d8-bf5f1d5320dd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.503654 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d94679d6f-b2bvf"] Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.515750 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d94679d6f-b2bvf"] Nov 30 08:08:19 crc kubenswrapper[4941]: I1130 08:08:19.532700 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25c34180-c904-4cfa-89d8-bf5f1d5320dd" path="/var/lib/kubelet/pods/25c34180-c904-4cfa-89d8-bf5f1d5320dd/volumes" Nov 30 08:08:38 crc kubenswrapper[4941]: I1130 08:08:38.717985 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-68c887db59-hwrwh" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.272194 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-b628k"] Nov 30 08:08:46 crc kubenswrapper[4941]: E1130 08:08:46.273569 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c34180-c904-4cfa-89d8-bf5f1d5320dd" containerName="init" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.273589 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c34180-c904-4cfa-89d8-bf5f1d5320dd" containerName="init" Nov 30 08:08:46 crc kubenswrapper[4941]: E1130 08:08:46.273623 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c34180-c904-4cfa-89d8-bf5f1d5320dd" containerName="dnsmasq-dns" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.273629 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c34180-c904-4cfa-89d8-bf5f1d5320dd" containerName="dnsmasq-dns" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.273793 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="25c34180-c904-4cfa-89d8-bf5f1d5320dd" containerName="dnsmasq-dns" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.274436 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-b628k" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.290489 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-b628k"] Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.373455 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-eb6c-account-create-update-6xtp9"] Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.374903 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-eb6c-account-create-update-6xtp9" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.376049 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ab98dc4-33db-4ca6-8393-b3d2454de757-operator-scripts\") pod \"glance-db-create-b628k\" (UID: \"1ab98dc4-33db-4ca6-8393-b3d2454de757\") " pod="openstack/glance-db-create-b628k" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.376142 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jg8j\" (UniqueName: \"kubernetes.io/projected/1ab98dc4-33db-4ca6-8393-b3d2454de757-kube-api-access-5jg8j\") pod \"glance-db-create-b628k\" (UID: \"1ab98dc4-33db-4ca6-8393-b3d2454de757\") " pod="openstack/glance-db-create-b628k" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.377680 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.387875 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-eb6c-account-create-update-6xtp9"] Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.478653 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ab98dc4-33db-4ca6-8393-b3d2454de757-operator-scripts\") pod \"glance-db-create-b628k\" (UID: \"1ab98dc4-33db-4ca6-8393-b3d2454de757\") " pod="openstack/glance-db-create-b628k" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.478725 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2w6g2\" (UniqueName: \"kubernetes.io/projected/7b591166-3d20-4fb2-94e2-1c6c38555910-kube-api-access-2w6g2\") pod \"glance-eb6c-account-create-update-6xtp9\" (UID: \"7b591166-3d20-4fb2-94e2-1c6c38555910\") " pod="openstack/glance-eb6c-account-create-update-6xtp9" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.478772 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b591166-3d20-4fb2-94e2-1c6c38555910-operator-scripts\") pod \"glance-eb6c-account-create-update-6xtp9\" (UID: \"7b591166-3d20-4fb2-94e2-1c6c38555910\") " pod="openstack/glance-eb6c-account-create-update-6xtp9" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.478814 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jg8j\" (UniqueName: \"kubernetes.io/projected/1ab98dc4-33db-4ca6-8393-b3d2454de757-kube-api-access-5jg8j\") pod \"glance-db-create-b628k\" (UID: \"1ab98dc4-33db-4ca6-8393-b3d2454de757\") " pod="openstack/glance-db-create-b628k" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.479711 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/1ab98dc4-33db-4ca6-8393-b3d2454de757-operator-scripts\") pod \"glance-db-create-b628k\" (UID: \"1ab98dc4-33db-4ca6-8393-b3d2454de757\") " pod="openstack/glance-db-create-b628k" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.498944 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jg8j\" (UniqueName: \"kubernetes.io/projected/1ab98dc4-33db-4ca6-8393-b3d2454de757-kube-api-access-5jg8j\") pod \"glance-db-create-b628k\" (UID: \"1ab98dc4-33db-4ca6-8393-b3d2454de757\") " pod="openstack/glance-db-create-b628k" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.581072 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2w6g2\" (UniqueName: \"kubernetes.io/projected/7b591166-3d20-4fb2-94e2-1c6c38555910-kube-api-access-2w6g2\") pod \"glance-eb6c-account-create-update-6xtp9\" (UID: \"7b591166-3d20-4fb2-94e2-1c6c38555910\") " pod="openstack/glance-eb6c-account-create-update-6xtp9" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.581133 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b591166-3d20-4fb2-94e2-1c6c38555910-operator-scripts\") pod \"glance-eb6c-account-create-update-6xtp9\" (UID: \"7b591166-3d20-4fb2-94e2-1c6c38555910\") " pod="openstack/glance-eb6c-account-create-update-6xtp9" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.582019 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b591166-3d20-4fb2-94e2-1c6c38555910-operator-scripts\") pod \"glance-eb6c-account-create-update-6xtp9\" (UID: \"7b591166-3d20-4fb2-94e2-1c6c38555910\") " pod="openstack/glance-eb6c-account-create-update-6xtp9" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.597629 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-b628k" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.599839 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2w6g2\" (UniqueName: \"kubernetes.io/projected/7b591166-3d20-4fb2-94e2-1c6c38555910-kube-api-access-2w6g2\") pod \"glance-eb6c-account-create-update-6xtp9\" (UID: \"7b591166-3d20-4fb2-94e2-1c6c38555910\") " pod="openstack/glance-eb6c-account-create-update-6xtp9" Nov 30 08:08:46 crc kubenswrapper[4941]: I1130 08:08:46.738260 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-eb6c-account-create-update-6xtp9" Nov 30 08:08:47 crc kubenswrapper[4941]: I1130 08:08:47.046618 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-b628k"] Nov 30 08:08:47 crc kubenswrapper[4941]: I1130 08:08:47.219053 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-eb6c-account-create-update-6xtp9"] Nov 30 08:08:47 crc kubenswrapper[4941]: W1130 08:08:47.226271 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b591166_3d20_4fb2_94e2_1c6c38555910.slice/crio-e6bd50ada42b0fc4a98637144392a68f8b564bd3447760ae9b58b326ac20d4a3 WatchSource:0}: Error finding container e6bd50ada42b0fc4a98637144392a68f8b564bd3447760ae9b58b326ac20d4a3: Status 404 returned error can't find the container with id e6bd50ada42b0fc4a98637144392a68f8b564bd3447760ae9b58b326ac20d4a3 Nov 30 08:08:47 crc kubenswrapper[4941]: I1130 08:08:47.498863 4941 generic.go:334] "Generic (PLEG): container finished" podID="1ab98dc4-33db-4ca6-8393-b3d2454de757" containerID="7d600181d16f5a62661188ed4388538845785344960331bd961e5b19fabbcfec" exitCode=0 Nov 30 08:08:47 crc kubenswrapper[4941]: I1130 08:08:47.498936 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-b628k" event={"ID":"1ab98dc4-33db-4ca6-8393-b3d2454de757","Type":"ContainerDied","Data":"7d600181d16f5a62661188ed4388538845785344960331bd961e5b19fabbcfec"} Nov 30 08:08:47 crc kubenswrapper[4941]: I1130 08:08:47.499195 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-b628k" event={"ID":"1ab98dc4-33db-4ca6-8393-b3d2454de757","Type":"ContainerStarted","Data":"642f8fbce5327cec7b21ba245c897122aa814886ba6ccac12d6c8415f38c2ef8"} Nov 30 08:08:47 crc kubenswrapper[4941]: I1130 08:08:47.502502 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-eb6c-account-create-update-6xtp9" event={"ID":"7b591166-3d20-4fb2-94e2-1c6c38555910","Type":"ContainerStarted","Data":"5deb1759bf043fe37d721b621991513743501477835c38c92e5d0d9bc4232d69"} Nov 30 08:08:47 crc kubenswrapper[4941]: I1130 08:08:47.502581 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-eb6c-account-create-update-6xtp9" event={"ID":"7b591166-3d20-4fb2-94e2-1c6c38555910","Type":"ContainerStarted","Data":"e6bd50ada42b0fc4a98637144392a68f8b564bd3447760ae9b58b326ac20d4a3"} Nov 30 08:08:47 crc kubenswrapper[4941]: I1130 08:08:47.544014 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-eb6c-account-create-update-6xtp9" podStartSLOduration=1.543998027 podStartE2EDuration="1.543998027s" podCreationTimestamp="2025-11-30 08:08:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:08:47.539418505 +0000 UTC m=+4948.307590114" watchObservedRunningTime="2025-11-30 08:08:47.543998027 +0000 UTC m=+4948.312169636" Nov 30 08:08:48 crc kubenswrapper[4941]: I1130 08:08:48.520917 4941 generic.go:334] "Generic (PLEG): container finished" podID="7b591166-3d20-4fb2-94e2-1c6c38555910" containerID="5deb1759bf043fe37d721b621991513743501477835c38c92e5d0d9bc4232d69" exitCode=0 Nov 30 08:08:48 crc kubenswrapper[4941]: I1130 08:08:48.520984 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-eb6c-account-create-update-6xtp9" 
event={"ID":"7b591166-3d20-4fb2-94e2-1c6c38555910","Type":"ContainerDied","Data":"5deb1759bf043fe37d721b621991513743501477835c38c92e5d0d9bc4232d69"} Nov 30 08:08:48 crc kubenswrapper[4941]: I1130 08:08:48.968482 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-b628k" Nov 30 08:08:49 crc kubenswrapper[4941]: I1130 08:08:49.135616 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ab98dc4-33db-4ca6-8393-b3d2454de757-operator-scripts\") pod \"1ab98dc4-33db-4ca6-8393-b3d2454de757\" (UID: \"1ab98dc4-33db-4ca6-8393-b3d2454de757\") " Nov 30 08:08:49 crc kubenswrapper[4941]: I1130 08:08:49.135804 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jg8j\" (UniqueName: \"kubernetes.io/projected/1ab98dc4-33db-4ca6-8393-b3d2454de757-kube-api-access-5jg8j\") pod \"1ab98dc4-33db-4ca6-8393-b3d2454de757\" (UID: \"1ab98dc4-33db-4ca6-8393-b3d2454de757\") " Nov 30 08:08:49 crc kubenswrapper[4941]: I1130 08:08:49.136838 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ab98dc4-33db-4ca6-8393-b3d2454de757-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1ab98dc4-33db-4ca6-8393-b3d2454de757" (UID: "1ab98dc4-33db-4ca6-8393-b3d2454de757"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:08:49 crc kubenswrapper[4941]: I1130 08:08:49.142355 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ab98dc4-33db-4ca6-8393-b3d2454de757-kube-api-access-5jg8j" (OuterVolumeSpecName: "kube-api-access-5jg8j") pod "1ab98dc4-33db-4ca6-8393-b3d2454de757" (UID: "1ab98dc4-33db-4ca6-8393-b3d2454de757"). InnerVolumeSpecName "kube-api-access-5jg8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:08:49 crc kubenswrapper[4941]: I1130 08:08:49.238774 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ab98dc4-33db-4ca6-8393-b3d2454de757-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:08:49 crc kubenswrapper[4941]: I1130 08:08:49.238841 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jg8j\" (UniqueName: \"kubernetes.io/projected/1ab98dc4-33db-4ca6-8393-b3d2454de757-kube-api-access-5jg8j\") on node \"crc\" DevicePath \"\"" Nov 30 08:08:49 crc kubenswrapper[4941]: I1130 08:08:49.536977 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-b628k" Nov 30 08:08:49 crc kubenswrapper[4941]: I1130 08:08:49.563465 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-b628k" event={"ID":"1ab98dc4-33db-4ca6-8393-b3d2454de757","Type":"ContainerDied","Data":"642f8fbce5327cec7b21ba245c897122aa814886ba6ccac12d6c8415f38c2ef8"} Nov 30 08:08:49 crc kubenswrapper[4941]: I1130 08:08:49.564005 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="642f8fbce5327cec7b21ba245c897122aa814886ba6ccac12d6c8415f38c2ef8" Nov 30 08:08:49 crc kubenswrapper[4941]: I1130 08:08:49.923549 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-eb6c-account-create-update-6xtp9" Nov 30 08:08:50 crc kubenswrapper[4941]: I1130 08:08:50.061188 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b591166-3d20-4fb2-94e2-1c6c38555910-operator-scripts\") pod \"7b591166-3d20-4fb2-94e2-1c6c38555910\" (UID: \"7b591166-3d20-4fb2-94e2-1c6c38555910\") " Nov 30 08:08:50 crc kubenswrapper[4941]: I1130 08:08:50.061405 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w6g2\" (UniqueName: \"kubernetes.io/projected/7b591166-3d20-4fb2-94e2-1c6c38555910-kube-api-access-2w6g2\") pod \"7b591166-3d20-4fb2-94e2-1c6c38555910\" (UID: \"7b591166-3d20-4fb2-94e2-1c6c38555910\") " Nov 30 08:08:50 crc kubenswrapper[4941]: I1130 08:08:50.061915 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b591166-3d20-4fb2-94e2-1c6c38555910-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7b591166-3d20-4fb2-94e2-1c6c38555910" (UID: "7b591166-3d20-4fb2-94e2-1c6c38555910"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:08:50 crc kubenswrapper[4941]: I1130 08:08:50.065254 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b591166-3d20-4fb2-94e2-1c6c38555910-kube-api-access-2w6g2" (OuterVolumeSpecName: "kube-api-access-2w6g2") pod "7b591166-3d20-4fb2-94e2-1c6c38555910" (UID: "7b591166-3d20-4fb2-94e2-1c6c38555910"). InnerVolumeSpecName "kube-api-access-2w6g2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:08:50 crc kubenswrapper[4941]: I1130 08:08:50.163615 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w6g2\" (UniqueName: \"kubernetes.io/projected/7b591166-3d20-4fb2-94e2-1c6c38555910-kube-api-access-2w6g2\") on node \"crc\" DevicePath \"\"" Nov 30 08:08:50 crc kubenswrapper[4941]: I1130 08:08:50.163653 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b591166-3d20-4fb2-94e2-1c6c38555910-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:08:50 crc kubenswrapper[4941]: I1130 08:08:50.550265 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-eb6c-account-create-update-6xtp9" event={"ID":"7b591166-3d20-4fb2-94e2-1c6c38555910","Type":"ContainerDied","Data":"e6bd50ada42b0fc4a98637144392a68f8b564bd3447760ae9b58b326ac20d4a3"} Nov 30 08:08:50 crc kubenswrapper[4941]: I1130 08:08:50.550771 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6bd50ada42b0fc4a98637144392a68f8b564bd3447760ae9b58b326ac20d4a3" Nov 30 08:08:50 crc kubenswrapper[4941]: I1130 08:08:50.550393 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-eb6c-account-create-update-6xtp9" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.604573 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-cd96b"] Nov 30 08:08:51 crc kubenswrapper[4941]: E1130 08:08:51.605289 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b591166-3d20-4fb2-94e2-1c6c38555910" containerName="mariadb-account-create-update" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.605312 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b591166-3d20-4fb2-94e2-1c6c38555910" containerName="mariadb-account-create-update" Nov 30 08:08:51 crc kubenswrapper[4941]: E1130 08:08:51.605508 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ab98dc4-33db-4ca6-8393-b3d2454de757" containerName="mariadb-database-create" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.605524 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ab98dc4-33db-4ca6-8393-b3d2454de757" containerName="mariadb-database-create" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.605881 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b591166-3d20-4fb2-94e2-1c6c38555910" containerName="mariadb-account-create-update" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.605925 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ab98dc4-33db-4ca6-8393-b3d2454de757" containerName="mariadb-database-create" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.607081 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.613267 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-m5w9r" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.613567 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.615018 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-cd96b"] Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.696375 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmnlt\" (UniqueName: \"kubernetes.io/projected/35b780b1-dbee-4f02-9566-3e8e407d8ce5-kube-api-access-wmnlt\") pod \"glance-db-sync-cd96b\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.696659 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-config-data\") pod \"glance-db-sync-cd96b\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.696836 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-combined-ca-bundle\") pod \"glance-db-sync-cd96b\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.697051 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-db-sync-config-data\") pod \"glance-db-sync-cd96b\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.799455 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-config-data\") pod \"glance-db-sync-cd96b\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.799539 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-combined-ca-bundle\") pod \"glance-db-sync-cd96b\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.799609 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-db-sync-config-data\") pod \"glance-db-sync-cd96b\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.799669 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmnlt\" (UniqueName: \"kubernetes.io/projected/35b780b1-dbee-4f02-9566-3e8e407d8ce5-kube-api-access-wmnlt\") pod \"glance-db-sync-cd96b\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.810186 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-config-data\") pod \"glance-db-sync-cd96b\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.810185 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-db-sync-config-data\") pod \"glance-db-sync-cd96b\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.824410 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-combined-ca-bundle\") pod \"glance-db-sync-cd96b\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.825738 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmnlt\" (UniqueName: \"kubernetes.io/projected/35b780b1-dbee-4f02-9566-3e8e407d8ce5-kube-api-access-wmnlt\") pod \"glance-db-sync-cd96b\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:51 crc kubenswrapper[4941]: I1130 08:08:51.943380 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-cd96b" Nov 30 08:08:52 crc kubenswrapper[4941]: I1130 08:08:52.605749 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-cd96b"] Nov 30 08:08:53 crc kubenswrapper[4941]: I1130 08:08:53.580964 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-cd96b" event={"ID":"35b780b1-dbee-4f02-9566-3e8e407d8ce5","Type":"ContainerStarted","Data":"dfc4e33771618904b1d483cc657888dede507cb6366d627fde70f8586fc76f64"} Nov 30 08:09:12 crc kubenswrapper[4941]: I1130 08:09:12.836532 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-cd96b" event={"ID":"35b780b1-dbee-4f02-9566-3e8e407d8ce5","Type":"ContainerStarted","Data":"4cc69630185f7c314dbd3633c69cf749dbd760579e646cbd40b6ea95bad4a3cf"} Nov 30 08:09:12 crc kubenswrapper[4941]: I1130 08:09:12.872509 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-cd96b" podStartSLOduration=3.276486296 podStartE2EDuration="21.872475391s" podCreationTimestamp="2025-11-30 08:08:51 +0000 UTC" firstStartedPulling="2025-11-30 08:08:52.607795569 +0000 UTC m=+4953.375967178" lastFinishedPulling="2025-11-30 08:09:11.203784664 +0000 UTC m=+4971.971956273" observedRunningTime="2025-11-30 08:09:12.868300902 +0000 UTC m=+4973.636472551" watchObservedRunningTime="2025-11-30 08:09:12.872475391 +0000 UTC m=+4973.640647030" Nov 30 08:09:15 crc kubenswrapper[4941]: I1130 08:09:15.872422 4941 generic.go:334] "Generic (PLEG): container finished" podID="35b780b1-dbee-4f02-9566-3e8e407d8ce5" containerID="4cc69630185f7c314dbd3633c69cf749dbd760579e646cbd40b6ea95bad4a3cf" exitCode=0 Nov 30 08:09:15 crc kubenswrapper[4941]: I1130 08:09:15.872483 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-cd96b" event={"ID":"35b780b1-dbee-4f02-9566-3e8e407d8ce5","Type":"ContainerDied","Data":"4cc69630185f7c314dbd3633c69cf749dbd760579e646cbd40b6ea95bad4a3cf"} Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.318801 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-cd96b" Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.402267 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-combined-ca-bundle\") pod \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.402625 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-db-sync-config-data\") pod \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.402669 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-config-data\") pod \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.402719 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmnlt\" (UniqueName: \"kubernetes.io/projected/35b780b1-dbee-4f02-9566-3e8e407d8ce5-kube-api-access-wmnlt\") pod \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\" (UID: \"35b780b1-dbee-4f02-9566-3e8e407d8ce5\") " Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.487724 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "35b780b1-dbee-4f02-9566-3e8e407d8ce5" (UID: "35b780b1-dbee-4f02-9566-3e8e407d8ce5"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.487911 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35b780b1-dbee-4f02-9566-3e8e407d8ce5-kube-api-access-wmnlt" (OuterVolumeSpecName: "kube-api-access-wmnlt") pod "35b780b1-dbee-4f02-9566-3e8e407d8ce5" (UID: "35b780b1-dbee-4f02-9566-3e8e407d8ce5"). InnerVolumeSpecName "kube-api-access-wmnlt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.491782 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35b780b1-dbee-4f02-9566-3e8e407d8ce5" (UID: "35b780b1-dbee-4f02-9566-3e8e407d8ce5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.504809 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.504844 4941 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.504856 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmnlt\" (UniqueName: \"kubernetes.io/projected/35b780b1-dbee-4f02-9566-3e8e407d8ce5-kube-api-access-wmnlt\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.519507 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-config-data" (OuterVolumeSpecName: "config-data") pod "35b780b1-dbee-4f02-9566-3e8e407d8ce5" (UID: "35b780b1-dbee-4f02-9566-3e8e407d8ce5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.607366 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35b780b1-dbee-4f02-9566-3e8e407d8ce5-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.894519 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-cd96b" event={"ID":"35b780b1-dbee-4f02-9566-3e8e407d8ce5","Type":"ContainerDied","Data":"dfc4e33771618904b1d483cc657888dede507cb6366d627fde70f8586fc76f64"} Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.894575 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dfc4e33771618904b1d483cc657888dede507cb6366d627fde70f8586fc76f64" Nov 30 08:09:17 crc kubenswrapper[4941]: I1130 08:09:17.894594 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-cd96b" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.411162 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:09:18 crc kubenswrapper[4941]: E1130 08:09:18.411936 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35b780b1-dbee-4f02-9566-3e8e407d8ce5" containerName="glance-db-sync" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.411948 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b780b1-dbee-4f02-9566-3e8e407d8ce5" containerName="glance-db-sync" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.412142 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="35b780b1-dbee-4f02-9566-3e8e407d8ce5" containerName="glance-db-sync" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.413676 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.417770 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.417888 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.417903 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-m5w9r" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.417775 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.434001 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.524783 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.524832 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.524911 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-logs\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.524965 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-config-data\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.524991 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-scripts\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.525024 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dfsz\" (UniqueName: \"kubernetes.io/projected/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-kube-api-access-6dfsz\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.525058 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/projected/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-ceph\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.541978 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76cdfb6c59-wzd9h"] Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.544196 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.575032 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76cdfb6c59-wzd9h"] Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.626520 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-ovsdbserver-nb\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.626868 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dfsz\" (UniqueName: \"kubernetes.io/projected/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-kube-api-access-6dfsz\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.626979 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-ceph\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.627065 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.627130 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.627240 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-dns-svc\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.627350 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-ovsdbserver-sb\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.627445 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-logs\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.627570 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-config-data\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.627655 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-scripts\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.627736 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxhnr\" (UniqueName: \"kubernetes.io/projected/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-kube-api-access-zxhnr\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.627823 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-config\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.628491 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.629587 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-logs\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.634301 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-ceph\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.634960 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-scripts\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.640171 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.650562 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dfsz\" (UniqueName: \"kubernetes.io/projected/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-kube-api-access-6dfsz\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.662469 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-config-data\") pod \"glance-default-external-api-0\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.730202 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-dns-svc\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.731657 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-ovsdbserver-sb\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.731923 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxhnr\" (UniqueName: \"kubernetes.io/projected/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-kube-api-access-zxhnr\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.732071 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-config\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.733067 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-ovsdbserver-nb\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.733023 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-ovsdbserver-sb\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.733217 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-config\") pod 
\"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.731603 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-dns-svc\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.734038 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-ovsdbserver-nb\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.736863 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.757494 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.758989 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.780812 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.782555 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxhnr\" (UniqueName: \"kubernetes.io/projected/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-kube-api-access-zxhnr\") pod \"dnsmasq-dns-76cdfb6c59-wzd9h\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.787156 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.834522 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.834566 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/969f8869-028f-4806-be30-a47bc98bb1d4-logs\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.834612 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/969f8869-028f-4806-be30-a47bc98bb1d4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.834643 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/projected/969f8869-028f-4806-be30-a47bc98bb1d4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.834689 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.834734 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5pfw\" (UniqueName: \"kubernetes.io/projected/969f8869-028f-4806-be30-a47bc98bb1d4-kube-api-access-n5pfw\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.834751 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.877174 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.939665 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.939720 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/969f8869-028f-4806-be30-a47bc98bb1d4-logs\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.939772 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/969f8869-028f-4806-be30-a47bc98bb1d4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.939795 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/969f8869-028f-4806-be30-a47bc98bb1d4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.939844 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 
08:09:18.939888 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5pfw\" (UniqueName: \"kubernetes.io/projected/969f8869-028f-4806-be30-a47bc98bb1d4-kube-api-access-n5pfw\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.939908 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.941914 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/969f8869-028f-4806-be30-a47bc98bb1d4-logs\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.942630 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/969f8869-028f-4806-be30-a47bc98bb1d4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.948773 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/969f8869-028f-4806-be30-a47bc98bb1d4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.952479 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.953608 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.953988 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:18 crc kubenswrapper[4941]: I1130 08:09:18.957759 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5pfw\" (UniqueName: \"kubernetes.io/projected/969f8869-028f-4806-be30-a47bc98bb1d4-kube-api-access-n5pfw\") pod \"glance-default-internal-api-0\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:19 crc kubenswrapper[4941]: I1130 08:09:19.153652 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:19 crc kubenswrapper[4941]: I1130 08:09:19.448583 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:09:19 crc kubenswrapper[4941]: I1130 08:09:19.464250 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76cdfb6c59-wzd9h"] Nov 30 08:09:19 crc kubenswrapper[4941]: I1130 08:09:19.628415 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:09:19 crc kubenswrapper[4941]: I1130 08:09:19.680537 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:09:19 crc kubenswrapper[4941]: I1130 08:09:19.916574 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"969f8869-028f-4806-be30-a47bc98bb1d4","Type":"ContainerStarted","Data":"4efba56b2a012f0e15050072b36eb15a80c56602dca12858c558c7b95978c353"} Nov 30 08:09:19 crc kubenswrapper[4941]: I1130 08:09:19.919785 4941 generic.go:334] "Generic (PLEG): container finished" podID="389464a4-94f6-4b32-ad06-1d05ec3c7ef3" containerID="1d646c4291443dae9568d133273f1f49088762720d7fc75a58293e1298a59d03" exitCode=0 Nov 30 08:09:19 crc kubenswrapper[4941]: I1130 08:09:19.919960 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" event={"ID":"389464a4-94f6-4b32-ad06-1d05ec3c7ef3","Type":"ContainerDied","Data":"1d646c4291443dae9568d133273f1f49088762720d7fc75a58293e1298a59d03"} Nov 30 08:09:19 crc kubenswrapper[4941]: I1130 08:09:19.920021 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" event={"ID":"389464a4-94f6-4b32-ad06-1d05ec3c7ef3","Type":"ContainerStarted","Data":"b88a0894054f9dbe033c8e147662d5156c3b72775c7be8354dab424a01333b86"} Nov 30 08:09:19 crc kubenswrapper[4941]: I1130 08:09:19.927397 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569","Type":"ContainerStarted","Data":"0eb2732af812944ce8ad4b26e142498e3a2be55068ecd3d2074a690f2947774c"} Nov 30 08:09:20 crc kubenswrapper[4941]: I1130 08:09:20.943592 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" event={"ID":"389464a4-94f6-4b32-ad06-1d05ec3c7ef3","Type":"ContainerStarted","Data":"8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997"} Nov 30 08:09:20 crc kubenswrapper[4941]: I1130 08:09:20.949242 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:20 crc kubenswrapper[4941]: I1130 08:09:20.952594 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569","Type":"ContainerStarted","Data":"9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986"} Nov 30 08:09:20 crc kubenswrapper[4941]: I1130 08:09:20.952671 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569","Type":"ContainerStarted","Data":"7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99"} Nov 30 08:09:20 crc kubenswrapper[4941]: I1130 08:09:20.952733 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" 
podUID="0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" containerName="glance-log" containerID="cri-o://7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99" gracePeriod=30 Nov 30 08:09:20 crc kubenswrapper[4941]: I1130 08:09:20.952770 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" containerName="glance-httpd" containerID="cri-o://9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986" gracePeriod=30 Nov 30 08:09:20 crc kubenswrapper[4941]: I1130 08:09:20.970169 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" podStartSLOduration=2.970147736 podStartE2EDuration="2.970147736s" podCreationTimestamp="2025-11-30 08:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:09:20.96768633 +0000 UTC m=+4981.735857929" watchObservedRunningTime="2025-11-30 08:09:20.970147736 +0000 UTC m=+4981.738319345" Nov 30 08:09:20 crc kubenswrapper[4941]: I1130 08:09:20.970674 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"969f8869-028f-4806-be30-a47bc98bb1d4","Type":"ContainerStarted","Data":"de8b32cc4bb6ee88a6bf4a436b67990f95d5e8817cc393bb19a5ec5342bdc6b9"} Nov 30 08:09:20 crc kubenswrapper[4941]: I1130 08:09:20.970725 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"969f8869-028f-4806-be30-a47bc98bb1d4","Type":"ContainerStarted","Data":"3f9dadf4bacacb862ab997fd64eb267c2795f95fff271c30d2b48d2d70ee4391"} Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.001411 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.001382561 podStartE2EDuration="3.001382561s" podCreationTimestamp="2025-11-30 08:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:09:20.991135844 +0000 UTC m=+4981.759307443" watchObservedRunningTime="2025-11-30 08:09:21.001382561 +0000 UTC m=+4981.769554180" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.019658 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.019626745 podStartE2EDuration="3.019626745s" podCreationTimestamp="2025-11-30 08:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:09:21.016510288 +0000 UTC m=+4981.784681907" watchObservedRunningTime="2025-11-30 08:09:21.019626745 +0000 UTC m=+4981.787798364" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.540593 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.596714 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.717487 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-logs\") pod \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.719468 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-httpd-run\") pod \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.719515 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-combined-ca-bundle\") pod \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.719563 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-scripts\") pod \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.719641 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dfsz\" (UniqueName: \"kubernetes.io/projected/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-kube-api-access-6dfsz\") pod \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.719718 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-ceph\") pod \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.719757 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-config-data\") pod \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\" (UID: \"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569\") " Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.719394 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-logs" (OuterVolumeSpecName: "logs") pod "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" (UID: "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.721925 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" (UID: "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.730654 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-scripts" (OuterVolumeSpecName: "scripts") pod "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" (UID: "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.732635 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-ceph" (OuterVolumeSpecName: "ceph") pod "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" (UID: "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.734316 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-kube-api-access-6dfsz" (OuterVolumeSpecName: "kube-api-access-6dfsz") pod "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" (UID: "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569"). InnerVolumeSpecName "kube-api-access-6dfsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.761058 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" (UID: "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.821838 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-logs\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.822308 4941 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.822320 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.822345 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.822356 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dfsz\" (UniqueName: \"kubernetes.io/projected/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-kube-api-access-6dfsz\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.822365 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.825909 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-config-data" (OuterVolumeSpecName: "config-data") pod "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" (UID: "0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.924699 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.984843 4941 generic.go:334] "Generic (PLEG): container finished" podID="0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" containerID="9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986" exitCode=143 Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.984896 4941 generic.go:334] "Generic (PLEG): container finished" podID="0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" containerID="7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99" exitCode=143 Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.984966 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569","Type":"ContainerDied","Data":"9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986"} Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.985001 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.985061 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569","Type":"ContainerDied","Data":"7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99"} Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.985091 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569","Type":"ContainerDied","Data":"0eb2732af812944ce8ad4b26e142498e3a2be55068ecd3d2074a690f2947774c"} Nov 30 08:09:21 crc kubenswrapper[4941]: I1130 08:09:21.985130 4941 scope.go:117] "RemoveContainer" containerID="9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.033510 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.042394 4941 scope.go:117] "RemoveContainer" containerID="7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.050735 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.069865 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:09:22 crc kubenswrapper[4941]: E1130 08:09:22.070272 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" containerName="glance-httpd" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.070287 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" containerName="glance-httpd" Nov 30 08:09:22 crc kubenswrapper[4941]: E1130 08:09:22.070316 4941 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" containerName="glance-log" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.070341 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" containerName="glance-log" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.070569 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" containerName="glance-httpd" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.070594 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" containerName="glance-log" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.071789 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.073671 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.096920 4941 scope.go:117] "RemoveContainer" containerID="9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986" Nov 30 08:09:22 crc kubenswrapper[4941]: E1130 08:09:22.098148 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986\": container with ID starting with 9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986 not found: ID does not exist" containerID="9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.101903 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986"} err="failed to get container status \"9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986\": rpc error: code = NotFound desc = could not find container \"9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986\": container with ID starting with 9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986 not found: ID does not exist" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.102014 4941 scope.go:117] "RemoveContainer" containerID="7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.105947 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:09:22 crc kubenswrapper[4941]: E1130 08:09:22.107121 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99\": container with ID starting with 7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99 not found: ID does not exist" containerID="7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.107235 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99"} err="failed to get container status \"7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99\": rpc error: code = NotFound desc = could not find container \"7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99\": 
container with ID starting with 7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99 not found: ID does not exist" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.107321 4941 scope.go:117] "RemoveContainer" containerID="9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.107852 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986"} err="failed to get container status \"9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986\": rpc error: code = NotFound desc = could not find container \"9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986\": container with ID starting with 9227b525745ae30a3563ce02e370bf0224748ddf5a3538131cbe0e85e8a2d986 not found: ID does not exist" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.107979 4941 scope.go:117] "RemoveContainer" containerID="7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.108481 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99"} err="failed to get container status \"7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99\": rpc error: code = NotFound desc = could not find container \"7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99\": container with ID starting with 7877627f279198cdf150519e1d2cbdee491373e1bd7851680997490868ad1f99 not found: ID does not exist" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.235187 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/88976074-47b8-4a90-9509-d2056016dcaa-logs\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.235577 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.235674 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8clm\" (UniqueName: \"kubernetes.io/projected/88976074-47b8-4a90-9509-d2056016dcaa-kube-api-access-l8clm\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.235820 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/88976074-47b8-4a90-9509-d2056016dcaa-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.235976 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-scripts\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.236061 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/88976074-47b8-4a90-9509-d2056016dcaa-ceph\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.236129 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-config-data\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.338136 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-scripts\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.338192 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/88976074-47b8-4a90-9509-d2056016dcaa-ceph\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.338215 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-config-data\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.338251 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/88976074-47b8-4a90-9509-d2056016dcaa-logs\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.338275 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.338299 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8clm\" (UniqueName: \"kubernetes.io/projected/88976074-47b8-4a90-9509-d2056016dcaa-kube-api-access-l8clm\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.338374 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/88976074-47b8-4a90-9509-d2056016dcaa-httpd-run\") pod 
\"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.338996 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/88976074-47b8-4a90-9509-d2056016dcaa-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.339452 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/88976074-47b8-4a90-9509-d2056016dcaa-logs\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.343430 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/88976074-47b8-4a90-9509-d2056016dcaa-ceph\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.348411 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.349855 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-config-data\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.350965 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-scripts\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.363173 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8clm\" (UniqueName: \"kubernetes.io/projected/88976074-47b8-4a90-9509-d2056016dcaa-kube-api-access-l8clm\") pod \"glance-default-external-api-0\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.420223 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.997683 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="969f8869-028f-4806-be30-a47bc98bb1d4" containerName="glance-log" containerID="cri-o://3f9dadf4bacacb862ab997fd64eb267c2795f95fff271c30d2b48d2d70ee4391" gracePeriod=30 Nov 30 08:09:22 crc kubenswrapper[4941]: I1130 08:09:22.997759 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="969f8869-028f-4806-be30-a47bc98bb1d4" containerName="glance-httpd" containerID="cri-o://de8b32cc4bb6ee88a6bf4a436b67990f95d5e8817cc393bb19a5ec5342bdc6b9" gracePeriod=30 Nov 30 08:09:23 crc kubenswrapper[4941]: I1130 08:09:23.049558 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:09:23 crc kubenswrapper[4941]: W1130 08:09:23.061896 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88976074_47b8_4a90_9509_d2056016dcaa.slice/crio-017a4701932556c3fe363f1ebfe851ef86a7faa4512ae89e892846ec086cfe00 WatchSource:0}: Error finding container 017a4701932556c3fe363f1ebfe851ef86a7faa4512ae89e892846ec086cfe00: Status 404 returned error can't find the container with id 017a4701932556c3fe363f1ebfe851ef86a7faa4512ae89e892846ec086cfe00 Nov 30 08:09:23 crc kubenswrapper[4941]: I1130 08:09:23.536374 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569" path="/var/lib/kubelet/pods/0dd2bb09-1b76-42c7-b4fb-f09bc9d9f569/volumes" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.022700 4941 generic.go:334] "Generic (PLEG): container finished" podID="969f8869-028f-4806-be30-a47bc98bb1d4" containerID="de8b32cc4bb6ee88a6bf4a436b67990f95d5e8817cc393bb19a5ec5342bdc6b9" exitCode=0 Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.032494 4941 generic.go:334] "Generic (PLEG): container finished" podID="969f8869-028f-4806-be30-a47bc98bb1d4" containerID="3f9dadf4bacacb862ab997fd64eb267c2795f95fff271c30d2b48d2d70ee4391" exitCode=143 Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.032775 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"969f8869-028f-4806-be30-a47bc98bb1d4","Type":"ContainerDied","Data":"de8b32cc4bb6ee88a6bf4a436b67990f95d5e8817cc393bb19a5ec5342bdc6b9"} Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.032888 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"969f8869-028f-4806-be30-a47bc98bb1d4","Type":"ContainerDied","Data":"3f9dadf4bacacb862ab997fd64eb267c2795f95fff271c30d2b48d2d70ee4391"} Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.062845 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"88976074-47b8-4a90-9509-d2056016dcaa","Type":"ContainerStarted","Data":"23e89525e8ec1bbad26ebc0a81c674a22552a3370d24be1888065f2837044558"} Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.062930 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"88976074-47b8-4a90-9509-d2056016dcaa","Type":"ContainerStarted","Data":"017a4701932556c3fe363f1ebfe851ef86a7faa4512ae89e892846ec086cfe00"} Nov 30 08:09:24 crc 
kubenswrapper[4941]: I1130 08:09:24.175804 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.282133 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5pfw\" (UniqueName: \"kubernetes.io/projected/969f8869-028f-4806-be30-a47bc98bb1d4-kube-api-access-n5pfw\") pod \"969f8869-028f-4806-be30-a47bc98bb1d4\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.282265 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-config-data\") pod \"969f8869-028f-4806-be30-a47bc98bb1d4\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.283138 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/969f8869-028f-4806-be30-a47bc98bb1d4-httpd-run\") pod \"969f8869-028f-4806-be30-a47bc98bb1d4\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.283217 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-scripts\") pod \"969f8869-028f-4806-be30-a47bc98bb1d4\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.283349 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-combined-ca-bundle\") pod \"969f8869-028f-4806-be30-a47bc98bb1d4\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.283522 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/969f8869-028f-4806-be30-a47bc98bb1d4-logs\") pod \"969f8869-028f-4806-be30-a47bc98bb1d4\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.283575 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/969f8869-028f-4806-be30-a47bc98bb1d4-ceph\") pod \"969f8869-028f-4806-be30-a47bc98bb1d4\" (UID: \"969f8869-028f-4806-be30-a47bc98bb1d4\") " Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.283702 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/969f8869-028f-4806-be30-a47bc98bb1d4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "969f8869-028f-4806-be30-a47bc98bb1d4" (UID: "969f8869-028f-4806-be30-a47bc98bb1d4"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.284046 4941 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/969f8869-028f-4806-be30-a47bc98bb1d4-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.284430 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/969f8869-028f-4806-be30-a47bc98bb1d4-logs" (OuterVolumeSpecName: "logs") pod "969f8869-028f-4806-be30-a47bc98bb1d4" (UID: "969f8869-028f-4806-be30-a47bc98bb1d4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.290089 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/969f8869-028f-4806-be30-a47bc98bb1d4-ceph" (OuterVolumeSpecName: "ceph") pod "969f8869-028f-4806-be30-a47bc98bb1d4" (UID: "969f8869-028f-4806-be30-a47bc98bb1d4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.290174 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/969f8869-028f-4806-be30-a47bc98bb1d4-kube-api-access-n5pfw" (OuterVolumeSpecName: "kube-api-access-n5pfw") pod "969f8869-028f-4806-be30-a47bc98bb1d4" (UID: "969f8869-028f-4806-be30-a47bc98bb1d4"). InnerVolumeSpecName "kube-api-access-n5pfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.292242 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-scripts" (OuterVolumeSpecName: "scripts") pod "969f8869-028f-4806-be30-a47bc98bb1d4" (UID: "969f8869-028f-4806-be30-a47bc98bb1d4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.320316 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "969f8869-028f-4806-be30-a47bc98bb1d4" (UID: "969f8869-028f-4806-be30-a47bc98bb1d4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.355623 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-config-data" (OuterVolumeSpecName: "config-data") pod "969f8869-028f-4806-be30-a47bc98bb1d4" (UID: "969f8869-028f-4806-be30-a47bc98bb1d4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.386235 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/969f8869-028f-4806-be30-a47bc98bb1d4-logs\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.386279 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/969f8869-028f-4806-be30-a47bc98bb1d4-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.386346 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5pfw\" (UniqueName: \"kubernetes.io/projected/969f8869-028f-4806-be30-a47bc98bb1d4-kube-api-access-n5pfw\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.386365 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.386378 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:24 crc kubenswrapper[4941]: I1130 08:09:24.386390 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/969f8869-028f-4806-be30-a47bc98bb1d4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.080681 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"88976074-47b8-4a90-9509-d2056016dcaa","Type":"ContainerStarted","Data":"032392ac773741ecded3844a05c9ccffc22f4028abdc9447f16aa480b6be2dd5"} Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.083564 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"969f8869-028f-4806-be30-a47bc98bb1d4","Type":"ContainerDied","Data":"4efba56b2a012f0e15050072b36eb15a80c56602dca12858c558c7b95978c353"} Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.083601 4941 scope.go:117] "RemoveContainer" containerID="de8b32cc4bb6ee88a6bf4a436b67990f95d5e8817cc393bb19a5ec5342bdc6b9" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.083654 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.113597 4941 scope.go:117] "RemoveContainer" containerID="3f9dadf4bacacb862ab997fd64eb267c2795f95fff271c30d2b48d2d70ee4391" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.125908 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.125871036 podStartE2EDuration="3.125871036s" podCreationTimestamp="2025-11-30 08:09:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:09:25.101854644 +0000 UTC m=+4985.870026263" watchObservedRunningTime="2025-11-30 08:09:25.125871036 +0000 UTC m=+4985.894042665" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.160150 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.176226 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.196021 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:09:25 crc kubenswrapper[4941]: E1130 08:09:25.196529 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="969f8869-028f-4806-be30-a47bc98bb1d4" containerName="glance-log" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.196551 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="969f8869-028f-4806-be30-a47bc98bb1d4" containerName="glance-log" Nov 30 08:09:25 crc kubenswrapper[4941]: E1130 08:09:25.196580 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="969f8869-028f-4806-be30-a47bc98bb1d4" containerName="glance-httpd" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.196588 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="969f8869-028f-4806-be30-a47bc98bb1d4" containerName="glance-httpd" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.196770 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="969f8869-028f-4806-be30-a47bc98bb1d4" containerName="glance-httpd" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.196795 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="969f8869-028f-4806-be30-a47bc98bb1d4" containerName="glance-log" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.197846 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.202029 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.235967 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.307534 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d5a62427-4986-40d5-a842-9ac48dbd0a21-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.307748 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4hss\" (UniqueName: \"kubernetes.io/projected/d5a62427-4986-40d5-a842-9ac48dbd0a21-kube-api-access-g4hss\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.307784 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.307803 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.307846 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.307956 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d5a62427-4986-40d5-a842-9ac48dbd0a21-ceph\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.307983 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5a62427-4986-40d5-a842-9ac48dbd0a21-logs\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.410370 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.410433 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d5a62427-4986-40d5-a842-9ac48dbd0a21-ceph\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.410511 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5a62427-4986-40d5-a842-9ac48dbd0a21-logs\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.410562 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d5a62427-4986-40d5-a842-9ac48dbd0a21-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.410696 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4hss\" (UniqueName: \"kubernetes.io/projected/d5a62427-4986-40d5-a842-9ac48dbd0a21-kube-api-access-g4hss\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.410738 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.410768 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.411063 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5a62427-4986-40d5-a842-9ac48dbd0a21-logs\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.412022 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d5a62427-4986-40d5-a842-9ac48dbd0a21-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.416817 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.418223 4941 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d5a62427-4986-40d5-a842-9ac48dbd0a21-ceph\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.420804 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.421771 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.431186 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4hss\" (UniqueName: \"kubernetes.io/projected/d5a62427-4986-40d5-a842-9ac48dbd0a21-kube-api-access-g4hss\") pod \"glance-default-internal-api-0\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.532320 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="969f8869-028f-4806-be30-a47bc98bb1d4" path="/var/lib/kubelet/pods/969f8869-028f-4806-be30-a47bc98bb1d4/volumes" Nov 30 08:09:25 crc kubenswrapper[4941]: I1130 08:09:25.570024 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:26 crc kubenswrapper[4941]: I1130 08:09:26.135559 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:09:26 crc kubenswrapper[4941]: W1130 08:09:26.498389 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5a62427_4986_40d5_a842_9ac48dbd0a21.slice/crio-753641fb758bbbfeefa11a5666edb9a261721dcdd99944fe07de3240e702402e WatchSource:0}: Error finding container 753641fb758bbbfeefa11a5666edb9a261721dcdd99944fe07de3240e702402e: Status 404 returned error can't find the container with id 753641fb758bbbfeefa11a5666edb9a261721dcdd99944fe07de3240e702402e Nov 30 08:09:27 crc kubenswrapper[4941]: I1130 08:09:27.111560 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d5a62427-4986-40d5-a842-9ac48dbd0a21","Type":"ContainerStarted","Data":"9d5134cc3c3a66dac5b1d8a9c588d774c803bba9d8bc23c51706ff1df64c0d6b"} Nov 30 08:09:27 crc kubenswrapper[4941]: I1130 08:09:27.111879 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d5a62427-4986-40d5-a842-9ac48dbd0a21","Type":"ContainerStarted","Data":"753641fb758bbbfeefa11a5666edb9a261721dcdd99944fe07de3240e702402e"} Nov 30 08:09:28 crc kubenswrapper[4941]: I1130 08:09:28.123738 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d5a62427-4986-40d5-a842-9ac48dbd0a21","Type":"ContainerStarted","Data":"565d0ba0f7bcd368e68e46139d3bb2d304a9ff85dc71366ee5c1e1dfa4c145f8"} Nov 30 08:09:28 crc kubenswrapper[4941]: I1130 08:09:28.162397 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.16236674 podStartE2EDuration="3.16236674s" podCreationTimestamp="2025-11-30 08:09:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:09:28.154503408 +0000 UTC m=+4988.922675067" watchObservedRunningTime="2025-11-30 08:09:28.16236674 +0000 UTC m=+4988.930538359" Nov 30 08:09:28 crc kubenswrapper[4941]: I1130 08:09:28.879676 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:09:28 crc kubenswrapper[4941]: I1130 08:09:28.987033 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b9957f557-pv64j"] Nov 30 08:09:28 crc kubenswrapper[4941]: I1130 08:09:28.987539 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b9957f557-pv64j" podUID="f466e9bf-9be7-4555-8667-a8b710462f04" containerName="dnsmasq-dns" containerID="cri-o://bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba" gracePeriod=10 Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.133403 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.166723 4941 generic.go:334] "Generic (PLEG): container finished" podID="f466e9bf-9be7-4555-8667-a8b710462f04" containerID="bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba" exitCode=0 Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.166798 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b9957f557-pv64j" event={"ID":"f466e9bf-9be7-4555-8667-a8b710462f04","Type":"ContainerDied","Data":"bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba"} Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.166843 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b9957f557-pv64j" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.166878 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b9957f557-pv64j" event={"ID":"f466e9bf-9be7-4555-8667-a8b710462f04","Type":"ContainerDied","Data":"4a09d38985ce462d9ff39b8ad2118ce24c5331772211fb8b8f947c6a2eafbbe5"} Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.166905 4941 scope.go:117] "RemoveContainer" containerID="bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.228976 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-dns-svc\") pod \"f466e9bf-9be7-4555-8667-a8b710462f04\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.229088 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-config\") pod \"f466e9bf-9be7-4555-8667-a8b710462f04\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.229235 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-ovsdbserver-sb\") pod \"f466e9bf-9be7-4555-8667-a8b710462f04\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.229263 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-ovsdbserver-nb\") pod \"f466e9bf-9be7-4555-8667-a8b710462f04\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.230234 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g78hr\" (UniqueName: \"kubernetes.io/projected/f466e9bf-9be7-4555-8667-a8b710462f04-kube-api-access-g78hr\") pod \"f466e9bf-9be7-4555-8667-a8b710462f04\" (UID: \"f466e9bf-9be7-4555-8667-a8b710462f04\") " Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.236166 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f466e9bf-9be7-4555-8667-a8b710462f04-kube-api-access-g78hr" (OuterVolumeSpecName: "kube-api-access-g78hr") pod "f466e9bf-9be7-4555-8667-a8b710462f04" (UID: "f466e9bf-9be7-4555-8667-a8b710462f04"). InnerVolumeSpecName "kube-api-access-g78hr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.282595 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f466e9bf-9be7-4555-8667-a8b710462f04" (UID: "f466e9bf-9be7-4555-8667-a8b710462f04"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.285342 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f466e9bf-9be7-4555-8667-a8b710462f04" (UID: "f466e9bf-9be7-4555-8667-a8b710462f04"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.288870 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f466e9bf-9be7-4555-8667-a8b710462f04" (UID: "f466e9bf-9be7-4555-8667-a8b710462f04"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.295172 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-config" (OuterVolumeSpecName: "config") pod "f466e9bf-9be7-4555-8667-a8b710462f04" (UID: "f466e9bf-9be7-4555-8667-a8b710462f04"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.332359 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.332412 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.332429 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g78hr\" (UniqueName: \"kubernetes.io/projected/f466e9bf-9be7-4555-8667-a8b710462f04-kube-api-access-g78hr\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.332448 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.332463 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f466e9bf-9be7-4555-8667-a8b710462f04-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.455059 4941 scope.go:117] "RemoveContainer" containerID="683cb4b402561dcdbc89c75518d8339e7359e3d7f79231bfa894ea728232953c" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.477059 4941 scope.go:117] "RemoveContainer" containerID="bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba" Nov 30 08:09:30 crc kubenswrapper[4941]: E1130 08:09:30.477707 4941 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba\": container with ID starting with bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba not found: ID does not exist" containerID="bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.477774 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba"} err="failed to get container status \"bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba\": rpc error: code = NotFound desc = could not find container \"bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba\": container with ID starting with bc6bec53d3e12fcd54e6200b6c141b14b2deb2973bc1ac77fe26b8f0252753ba not found: ID does not exist" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.477813 4941 scope.go:117] "RemoveContainer" containerID="683cb4b402561dcdbc89c75518d8339e7359e3d7f79231bfa894ea728232953c" Nov 30 08:09:30 crc kubenswrapper[4941]: E1130 08:09:30.478296 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"683cb4b402561dcdbc89c75518d8339e7359e3d7f79231bfa894ea728232953c\": container with ID starting with 683cb4b402561dcdbc89c75518d8339e7359e3d7f79231bfa894ea728232953c not found: ID does not exist" containerID="683cb4b402561dcdbc89c75518d8339e7359e3d7f79231bfa894ea728232953c" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.478388 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"683cb4b402561dcdbc89c75518d8339e7359e3d7f79231bfa894ea728232953c"} err="failed to get container status \"683cb4b402561dcdbc89c75518d8339e7359e3d7f79231bfa894ea728232953c\": rpc error: code = NotFound desc = could not find container \"683cb4b402561dcdbc89c75518d8339e7359e3d7f79231bfa894ea728232953c\": container with ID starting with 683cb4b402561dcdbc89c75518d8339e7359e3d7f79231bfa894ea728232953c not found: ID does not exist" Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.523858 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b9957f557-pv64j"] Nov 30 08:09:30 crc kubenswrapper[4941]: I1130 08:09:30.533844 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b9957f557-pv64j"] Nov 30 08:09:31 crc kubenswrapper[4941]: I1130 08:09:31.536863 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f466e9bf-9be7-4555-8667-a8b710462f04" path="/var/lib/kubelet/pods/f466e9bf-9be7-4555-8667-a8b710462f04/volumes" Nov 30 08:09:32 crc kubenswrapper[4941]: I1130 08:09:32.420953 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 30 08:09:32 crc kubenswrapper[4941]: I1130 08:09:32.421043 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 30 08:09:32 crc kubenswrapper[4941]: I1130 08:09:32.481650 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 30 08:09:32 crc kubenswrapper[4941]: I1130 08:09:32.498501 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 30 08:09:33 crc 
kubenswrapper[4941]: I1130 08:09:33.206550 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 30 08:09:33 crc kubenswrapper[4941]: I1130 08:09:33.206643 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 30 08:09:35 crc kubenswrapper[4941]: I1130 08:09:35.178674 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 30 08:09:35 crc kubenswrapper[4941]: I1130 08:09:35.228070 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 30 08:09:35 crc kubenswrapper[4941]: I1130 08:09:35.570485 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:35 crc kubenswrapper[4941]: I1130 08:09:35.570930 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:35 crc kubenswrapper[4941]: I1130 08:09:35.630294 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:35 crc kubenswrapper[4941]: I1130 08:09:35.643671 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:36 crc kubenswrapper[4941]: I1130 08:09:36.240649 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:36 crc kubenswrapper[4941]: I1130 08:09:36.240722 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:38 crc kubenswrapper[4941]: I1130 08:09:38.096281 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:38 crc kubenswrapper[4941]: I1130 08:09:38.101748 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.168106 4941 scope.go:117] "RemoveContainer" containerID="8f89299237a8f79203b2c6be09045d7b8ee1487341de57c5934eda94d977bdd3" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.203559 4941 scope.go:117] "RemoveContainer" containerID="03e201e8c22e22776dcfb88223134e0306f00bf46f055f623122afaa6cda2d3e" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.628381 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-gpqrl"] Nov 30 08:09:44 crc kubenswrapper[4941]: E1130 08:09:44.628968 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f466e9bf-9be7-4555-8667-a8b710462f04" containerName="dnsmasq-dns" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.628983 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f466e9bf-9be7-4555-8667-a8b710462f04" containerName="dnsmasq-dns" Nov 30 08:09:44 crc kubenswrapper[4941]: E1130 08:09:44.629000 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f466e9bf-9be7-4555-8667-a8b710462f04" containerName="init" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.629005 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f466e9bf-9be7-4555-8667-a8b710462f04" containerName="init" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.629153 4941 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="f466e9bf-9be7-4555-8667-a8b710462f04" containerName="dnsmasq-dns" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.629766 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-gpqrl" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.650029 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-gpqrl"] Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.767207 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ww9n\" (UniqueName: \"kubernetes.io/projected/2c22aeb0-d80f-41b2-b261-6d8105dc83b8-kube-api-access-7ww9n\") pod \"placement-db-create-gpqrl\" (UID: \"2c22aeb0-d80f-41b2-b261-6d8105dc83b8\") " pod="openstack/placement-db-create-gpqrl" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.767741 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c22aeb0-d80f-41b2-b261-6d8105dc83b8-operator-scripts\") pod \"placement-db-create-gpqrl\" (UID: \"2c22aeb0-d80f-41b2-b261-6d8105dc83b8\") " pod="openstack/placement-db-create-gpqrl" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.777490 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5f5b-account-create-update-k9fmn"] Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.779384 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5f5b-account-create-update-k9fmn" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.782304 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.797374 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5f5b-account-create-update-k9fmn"] Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.870919 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ww9n\" (UniqueName: \"kubernetes.io/projected/2c22aeb0-d80f-41b2-b261-6d8105dc83b8-kube-api-access-7ww9n\") pod \"placement-db-create-gpqrl\" (UID: \"2c22aeb0-d80f-41b2-b261-6d8105dc83b8\") " pod="openstack/placement-db-create-gpqrl" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.870992 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwtl7\" (UniqueName: \"kubernetes.io/projected/00c3f957-7477-4a82-9739-c4beeb006bbd-kube-api-access-fwtl7\") pod \"placement-5f5b-account-create-update-k9fmn\" (UID: \"00c3f957-7477-4a82-9739-c4beeb006bbd\") " pod="openstack/placement-5f5b-account-create-update-k9fmn" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.871108 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c22aeb0-d80f-41b2-b261-6d8105dc83b8-operator-scripts\") pod \"placement-db-create-gpqrl\" (UID: \"2c22aeb0-d80f-41b2-b261-6d8105dc83b8\") " pod="openstack/placement-db-create-gpqrl" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.871133 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00c3f957-7477-4a82-9739-c4beeb006bbd-operator-scripts\") pod \"placement-5f5b-account-create-update-k9fmn\" (UID: 
\"00c3f957-7477-4a82-9739-c4beeb006bbd\") " pod="openstack/placement-5f5b-account-create-update-k9fmn" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.872000 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c22aeb0-d80f-41b2-b261-6d8105dc83b8-operator-scripts\") pod \"placement-db-create-gpqrl\" (UID: \"2c22aeb0-d80f-41b2-b261-6d8105dc83b8\") " pod="openstack/placement-db-create-gpqrl" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.973404 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00c3f957-7477-4a82-9739-c4beeb006bbd-operator-scripts\") pod \"placement-5f5b-account-create-update-k9fmn\" (UID: \"00c3f957-7477-4a82-9739-c4beeb006bbd\") " pod="openstack/placement-5f5b-account-create-update-k9fmn" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.974023 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwtl7\" (UniqueName: \"kubernetes.io/projected/00c3f957-7477-4a82-9739-c4beeb006bbd-kube-api-access-fwtl7\") pod \"placement-5f5b-account-create-update-k9fmn\" (UID: \"00c3f957-7477-4a82-9739-c4beeb006bbd\") " pod="openstack/placement-5f5b-account-create-update-k9fmn" Nov 30 08:09:44 crc kubenswrapper[4941]: I1130 08:09:44.974790 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00c3f957-7477-4a82-9739-c4beeb006bbd-operator-scripts\") pod \"placement-5f5b-account-create-update-k9fmn\" (UID: \"00c3f957-7477-4a82-9739-c4beeb006bbd\") " pod="openstack/placement-5f5b-account-create-update-k9fmn" Nov 30 08:09:45 crc kubenswrapper[4941]: I1130 08:09:45.396395 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ww9n\" (UniqueName: \"kubernetes.io/projected/2c22aeb0-d80f-41b2-b261-6d8105dc83b8-kube-api-access-7ww9n\") pod \"placement-db-create-gpqrl\" (UID: \"2c22aeb0-d80f-41b2-b261-6d8105dc83b8\") " pod="openstack/placement-db-create-gpqrl" Nov 30 08:09:45 crc kubenswrapper[4941]: I1130 08:09:45.396411 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwtl7\" (UniqueName: \"kubernetes.io/projected/00c3f957-7477-4a82-9739-c4beeb006bbd-kube-api-access-fwtl7\") pod \"placement-5f5b-account-create-update-k9fmn\" (UID: \"00c3f957-7477-4a82-9739-c4beeb006bbd\") " pod="openstack/placement-5f5b-account-create-update-k9fmn" Nov 30 08:09:45 crc kubenswrapper[4941]: I1130 08:09:45.404750 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5f5b-account-create-update-k9fmn" Nov 30 08:09:45 crc kubenswrapper[4941]: I1130 08:09:45.552078 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-gpqrl" Nov 30 08:09:45 crc kubenswrapper[4941]: I1130 08:09:45.883639 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5f5b-account-create-update-k9fmn"] Nov 30 08:09:45 crc kubenswrapper[4941]: W1130 08:09:45.883986 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00c3f957_7477_4a82_9739_c4beeb006bbd.slice/crio-d3bf45f46f6af96b27fa036b6eb4d50e754056d3a1692942433e1bac4a835d14 WatchSource:0}: Error finding container d3bf45f46f6af96b27fa036b6eb4d50e754056d3a1692942433e1bac4a835d14: Status 404 returned error can't find the container with id d3bf45f46f6af96b27fa036b6eb4d50e754056d3a1692942433e1bac4a835d14 Nov 30 08:09:46 crc kubenswrapper[4941]: W1130 08:09:46.054321 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c22aeb0_d80f_41b2_b261_6d8105dc83b8.slice/crio-705e83937a51a4090c4f04c5c3566b0e3196e9d7fdc54459160d9484504946b7 WatchSource:0}: Error finding container 705e83937a51a4090c4f04c5c3566b0e3196e9d7fdc54459160d9484504946b7: Status 404 returned error can't find the container with id 705e83937a51a4090c4f04c5c3566b0e3196e9d7fdc54459160d9484504946b7 Nov 30 08:09:46 crc kubenswrapper[4941]: I1130 08:09:46.054648 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-gpqrl"] Nov 30 08:09:46 crc kubenswrapper[4941]: I1130 08:09:46.359485 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-gpqrl" event={"ID":"2c22aeb0-d80f-41b2-b261-6d8105dc83b8","Type":"ContainerStarted","Data":"9f512dc3396d04a9f7780fbb3233f19736fbf4746f61d33480ef85a32c2b6ed5"} Nov 30 08:09:46 crc kubenswrapper[4941]: I1130 08:09:46.359547 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-gpqrl" event={"ID":"2c22aeb0-d80f-41b2-b261-6d8105dc83b8","Type":"ContainerStarted","Data":"705e83937a51a4090c4f04c5c3566b0e3196e9d7fdc54459160d9484504946b7"} Nov 30 08:09:46 crc kubenswrapper[4941]: I1130 08:09:46.362972 4941 generic.go:334] "Generic (PLEG): container finished" podID="00c3f957-7477-4a82-9739-c4beeb006bbd" containerID="cd5d2234b051f881222cc935693c7cc015798ac4a998b0b0ca4be7af0628d931" exitCode=0 Nov 30 08:09:46 crc kubenswrapper[4941]: I1130 08:09:46.363021 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5f5b-account-create-update-k9fmn" event={"ID":"00c3f957-7477-4a82-9739-c4beeb006bbd","Type":"ContainerDied","Data":"cd5d2234b051f881222cc935693c7cc015798ac4a998b0b0ca4be7af0628d931"} Nov 30 08:09:46 crc kubenswrapper[4941]: I1130 08:09:46.363075 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5f5b-account-create-update-k9fmn" event={"ID":"00c3f957-7477-4a82-9739-c4beeb006bbd","Type":"ContainerStarted","Data":"d3bf45f46f6af96b27fa036b6eb4d50e754056d3a1692942433e1bac4a835d14"} Nov 30 08:09:46 crc kubenswrapper[4941]: I1130 08:09:46.403104 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-gpqrl" podStartSLOduration=2.403068777 podStartE2EDuration="2.403068777s" podCreationTimestamp="2025-11-30 08:09:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:09:46.378216659 +0000 UTC m=+5007.146388278" watchObservedRunningTime="2025-11-30 
08:09:46.403068777 +0000 UTC m=+5007.171240406" Nov 30 08:09:47 crc kubenswrapper[4941]: I1130 08:09:47.388486 4941 generic.go:334] "Generic (PLEG): container finished" podID="2c22aeb0-d80f-41b2-b261-6d8105dc83b8" containerID="9f512dc3396d04a9f7780fbb3233f19736fbf4746f61d33480ef85a32c2b6ed5" exitCode=0 Nov 30 08:09:47 crc kubenswrapper[4941]: I1130 08:09:47.388623 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-gpqrl" event={"ID":"2c22aeb0-d80f-41b2-b261-6d8105dc83b8","Type":"ContainerDied","Data":"9f512dc3396d04a9f7780fbb3233f19736fbf4746f61d33480ef85a32c2b6ed5"} Nov 30 08:09:47 crc kubenswrapper[4941]: I1130 08:09:47.782178 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5f5b-account-create-update-k9fmn" Nov 30 08:09:47 crc kubenswrapper[4941]: I1130 08:09:47.938761 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwtl7\" (UniqueName: \"kubernetes.io/projected/00c3f957-7477-4a82-9739-c4beeb006bbd-kube-api-access-fwtl7\") pod \"00c3f957-7477-4a82-9739-c4beeb006bbd\" (UID: \"00c3f957-7477-4a82-9739-c4beeb006bbd\") " Nov 30 08:09:47 crc kubenswrapper[4941]: I1130 08:09:47.938977 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00c3f957-7477-4a82-9739-c4beeb006bbd-operator-scripts\") pod \"00c3f957-7477-4a82-9739-c4beeb006bbd\" (UID: \"00c3f957-7477-4a82-9739-c4beeb006bbd\") " Nov 30 08:09:47 crc kubenswrapper[4941]: I1130 08:09:47.939827 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00c3f957-7477-4a82-9739-c4beeb006bbd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "00c3f957-7477-4a82-9739-c4beeb006bbd" (UID: "00c3f957-7477-4a82-9739-c4beeb006bbd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:09:47 crc kubenswrapper[4941]: I1130 08:09:47.945598 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00c3f957-7477-4a82-9739-c4beeb006bbd-kube-api-access-fwtl7" (OuterVolumeSpecName: "kube-api-access-fwtl7") pod "00c3f957-7477-4a82-9739-c4beeb006bbd" (UID: "00c3f957-7477-4a82-9739-c4beeb006bbd"). InnerVolumeSpecName "kube-api-access-fwtl7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:09:48 crc kubenswrapper[4941]: I1130 08:09:48.046034 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwtl7\" (UniqueName: \"kubernetes.io/projected/00c3f957-7477-4a82-9739-c4beeb006bbd-kube-api-access-fwtl7\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:48 crc kubenswrapper[4941]: I1130 08:09:48.046251 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00c3f957-7477-4a82-9739-c4beeb006bbd-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:48 crc kubenswrapper[4941]: I1130 08:09:48.405121 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5f5b-account-create-update-k9fmn" event={"ID":"00c3f957-7477-4a82-9739-c4beeb006bbd","Type":"ContainerDied","Data":"d3bf45f46f6af96b27fa036b6eb4d50e754056d3a1692942433e1bac4a835d14"} Nov 30 08:09:48 crc kubenswrapper[4941]: I1130 08:09:48.405203 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3bf45f46f6af96b27fa036b6eb4d50e754056d3a1692942433e1bac4a835d14" Nov 30 08:09:48 crc kubenswrapper[4941]: I1130 08:09:48.405139 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5f5b-account-create-update-k9fmn" Nov 30 08:09:48 crc kubenswrapper[4941]: I1130 08:09:48.764253 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-gpqrl" Nov 30 08:09:48 crc kubenswrapper[4941]: I1130 08:09:48.862766 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c22aeb0-d80f-41b2-b261-6d8105dc83b8-operator-scripts\") pod \"2c22aeb0-d80f-41b2-b261-6d8105dc83b8\" (UID: \"2c22aeb0-d80f-41b2-b261-6d8105dc83b8\") " Nov 30 08:09:48 crc kubenswrapper[4941]: I1130 08:09:48.862997 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ww9n\" (UniqueName: \"kubernetes.io/projected/2c22aeb0-d80f-41b2-b261-6d8105dc83b8-kube-api-access-7ww9n\") pod \"2c22aeb0-d80f-41b2-b261-6d8105dc83b8\" (UID: \"2c22aeb0-d80f-41b2-b261-6d8105dc83b8\") " Nov 30 08:09:48 crc kubenswrapper[4941]: I1130 08:09:48.863409 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c22aeb0-d80f-41b2-b261-6d8105dc83b8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2c22aeb0-d80f-41b2-b261-6d8105dc83b8" (UID: "2c22aeb0-d80f-41b2-b261-6d8105dc83b8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:09:48 crc kubenswrapper[4941]: I1130 08:09:48.863746 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c22aeb0-d80f-41b2-b261-6d8105dc83b8-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:48 crc kubenswrapper[4941]: I1130 08:09:48.871569 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c22aeb0-d80f-41b2-b261-6d8105dc83b8-kube-api-access-7ww9n" (OuterVolumeSpecName: "kube-api-access-7ww9n") pod "2c22aeb0-d80f-41b2-b261-6d8105dc83b8" (UID: "2c22aeb0-d80f-41b2-b261-6d8105dc83b8"). InnerVolumeSpecName "kube-api-access-7ww9n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:09:48 crc kubenswrapper[4941]: I1130 08:09:48.966016 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ww9n\" (UniqueName: \"kubernetes.io/projected/2c22aeb0-d80f-41b2-b261-6d8105dc83b8-kube-api-access-7ww9n\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:49 crc kubenswrapper[4941]: I1130 08:09:49.421491 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-gpqrl" event={"ID":"2c22aeb0-d80f-41b2-b261-6d8105dc83b8","Type":"ContainerDied","Data":"705e83937a51a4090c4f04c5c3566b0e3196e9d7fdc54459160d9484504946b7"} Nov 30 08:09:49 crc kubenswrapper[4941]: I1130 08:09:49.421551 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="705e83937a51a4090c4f04c5c3566b0e3196e9d7fdc54459160d9484504946b7" Nov 30 08:09:49 crc kubenswrapper[4941]: I1130 08:09:49.421647 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-gpqrl" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.068396 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-ct89q"] Nov 30 08:09:50 crc kubenswrapper[4941]: E1130 08:09:50.069263 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c22aeb0-d80f-41b2-b261-6d8105dc83b8" containerName="mariadb-database-create" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.069279 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c22aeb0-d80f-41b2-b261-6d8105dc83b8" containerName="mariadb-database-create" Nov 30 08:09:50 crc kubenswrapper[4941]: E1130 08:09:50.069292 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00c3f957-7477-4a82-9739-c4beeb006bbd" containerName="mariadb-account-create-update" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.069300 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="00c3f957-7477-4a82-9739-c4beeb006bbd" containerName="mariadb-account-create-update" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.069488 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="00c3f957-7477-4a82-9739-c4beeb006bbd" containerName="mariadb-account-create-update" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.069520 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c22aeb0-d80f-41b2-b261-6d8105dc83b8" containerName="mariadb-database-create" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.070207 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.072434 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.072851 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-jlcd4" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.073192 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.079584 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c7bfb66bc-pwc2c"] Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.081270 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.100712 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c7bfb66bc-pwc2c"] Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.113394 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-ct89q"] Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.196897 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-scripts\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.196940 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zg6kh\" (UniqueName: \"kubernetes.io/projected/926092ac-cdc5-4ad0-b9ec-bbd380355254-kube-api-access-zg6kh\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.196973 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-ovsdbserver-nb\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.197011 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-config-data\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.197054 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-config\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.197088 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-ovsdbserver-sb\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.197139 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65bfx\" (UniqueName: \"kubernetes.io/projected/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-kube-api-access-65bfx\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.197173 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-combined-ca-bundle\") pod \"placement-db-sync-ct89q\" (UID: 
\"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.197206 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/926092ac-cdc5-4ad0-b9ec-bbd380355254-logs\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.197228 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-dns-svc\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.298765 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-combined-ca-bundle\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.298838 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/926092ac-cdc5-4ad0-b9ec-bbd380355254-logs\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.298865 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-dns-svc\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.298896 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-scripts\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.298914 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zg6kh\" (UniqueName: \"kubernetes.io/projected/926092ac-cdc5-4ad0-b9ec-bbd380355254-kube-api-access-zg6kh\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.298937 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-ovsdbserver-nb\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.298971 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-config-data\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.299006 
4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-config\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.299055 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-ovsdbserver-sb\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.299082 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65bfx\" (UniqueName: \"kubernetes.io/projected/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-kube-api-access-65bfx\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.301026 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/926092ac-cdc5-4ad0-b9ec-bbd380355254-logs\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.303223 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-config\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.303225 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-ovsdbserver-nb\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.304158 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-scripts\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.304219 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-ovsdbserver-sb\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.304641 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-dns-svc\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.304789 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-combined-ca-bundle\") 
pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.307932 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-config-data\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.317603 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zg6kh\" (UniqueName: \"kubernetes.io/projected/926092ac-cdc5-4ad0-b9ec-bbd380355254-kube-api-access-zg6kh\") pod \"placement-db-sync-ct89q\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.319160 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65bfx\" (UniqueName: \"kubernetes.io/projected/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-kube-api-access-65bfx\") pod \"dnsmasq-dns-7c7bfb66bc-pwc2c\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.412739 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.423908 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.980880 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-ct89q"] Nov 30 08:09:50 crc kubenswrapper[4941]: W1130 08:09:50.982103 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod926092ac_cdc5_4ad0_b9ec_bbd380355254.slice/crio-454f7b96a4c7dcc945e08c2801b054dd457573d0fed205fa2997e297ef161a46 WatchSource:0}: Error finding container 454f7b96a4c7dcc945e08c2801b054dd457573d0fed205fa2997e297ef161a46: Status 404 returned error can't find the container with id 454f7b96a4c7dcc945e08c2801b054dd457573d0fed205fa2997e297ef161a46 Nov 30 08:09:50 crc kubenswrapper[4941]: I1130 08:09:50.985512 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 08:09:51 crc kubenswrapper[4941]: I1130 08:09:51.036434 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c7bfb66bc-pwc2c"] Nov 30 08:09:51 crc kubenswrapper[4941]: I1130 08:09:51.442394 4941 generic.go:334] "Generic (PLEG): container finished" podID="d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" containerID="d9e96df571dfad7f30d59f6e09eb1fb8147a1117d30ecbcf0e2b8690b6905487" exitCode=0 Nov 30 08:09:51 crc kubenswrapper[4941]: I1130 08:09:51.442552 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" event={"ID":"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5","Type":"ContainerDied","Data":"d9e96df571dfad7f30d59f6e09eb1fb8147a1117d30ecbcf0e2b8690b6905487"} Nov 30 08:09:51 crc kubenswrapper[4941]: I1130 08:09:51.442955 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" event={"ID":"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5","Type":"ContainerStarted","Data":"b27a36cab1a2f454705537eaaaa8d271098f14799eebb56858bce7cf7c0df970"} Nov 30 08:09:51 
crc kubenswrapper[4941]: I1130 08:09:51.446635 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-ct89q" event={"ID":"926092ac-cdc5-4ad0-b9ec-bbd380355254","Type":"ContainerStarted","Data":"454f7b96a4c7dcc945e08c2801b054dd457573d0fed205fa2997e297ef161a46"} Nov 30 08:09:52 crc kubenswrapper[4941]: I1130 08:09:52.461757 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" event={"ID":"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5","Type":"ContainerStarted","Data":"6cdf13dfefe4df81e69106ff8df5b157d1c3f2b706653e88321ef1f501ad14fd"} Nov 30 08:09:52 crc kubenswrapper[4941]: I1130 08:09:52.462322 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:09:52 crc kubenswrapper[4941]: I1130 08:09:52.490680 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" podStartSLOduration=2.490654997 podStartE2EDuration="2.490654997s" podCreationTimestamp="2025-11-30 08:09:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:09:52.486021073 +0000 UTC m=+5013.254192692" watchObservedRunningTime="2025-11-30 08:09:52.490654997 +0000 UTC m=+5013.258826616" Nov 30 08:09:55 crc kubenswrapper[4941]: I1130 08:09:55.499729 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-ct89q" event={"ID":"926092ac-cdc5-4ad0-b9ec-bbd380355254","Type":"ContainerStarted","Data":"5e2a316df35969303600a64df9dd4bb42597798100a3c671963edba1622a719f"} Nov 30 08:09:55 crc kubenswrapper[4941]: I1130 08:09:55.538255 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-ct89q" podStartSLOduration=2.174654163 podStartE2EDuration="5.538215613s" podCreationTimestamp="2025-11-30 08:09:50 +0000 UTC" firstStartedPulling="2025-11-30 08:09:50.985194986 +0000 UTC m=+5011.753366595" lastFinishedPulling="2025-11-30 08:09:54.348756436 +0000 UTC m=+5015.116928045" observedRunningTime="2025-11-30 08:09:55.526898093 +0000 UTC m=+5016.295069742" watchObservedRunningTime="2025-11-30 08:09:55.538215613 +0000 UTC m=+5016.306387222" Nov 30 08:09:56 crc kubenswrapper[4941]: I1130 08:09:56.515533 4941 generic.go:334] "Generic (PLEG): container finished" podID="926092ac-cdc5-4ad0-b9ec-bbd380355254" containerID="5e2a316df35969303600a64df9dd4bb42597798100a3c671963edba1622a719f" exitCode=0 Nov 30 08:09:56 crc kubenswrapper[4941]: I1130 08:09:56.515603 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-ct89q" event={"ID":"926092ac-cdc5-4ad0-b9ec-bbd380355254","Type":"ContainerDied","Data":"5e2a316df35969303600a64df9dd4bb42597798100a3c671963edba1622a719f"} Nov 30 08:09:57 crc kubenswrapper[4941]: I1130 08:09:57.900651 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:57 crc kubenswrapper[4941]: I1130 08:09:57.979737 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zg6kh\" (UniqueName: \"kubernetes.io/projected/926092ac-cdc5-4ad0-b9ec-bbd380355254-kube-api-access-zg6kh\") pod \"926092ac-cdc5-4ad0-b9ec-bbd380355254\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " Nov 30 08:09:57 crc kubenswrapper[4941]: I1130 08:09:57.979908 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-scripts\") pod \"926092ac-cdc5-4ad0-b9ec-bbd380355254\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " Nov 30 08:09:57 crc kubenswrapper[4941]: I1130 08:09:57.980056 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-config-data\") pod \"926092ac-cdc5-4ad0-b9ec-bbd380355254\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " Nov 30 08:09:57 crc kubenswrapper[4941]: I1130 08:09:57.980137 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/926092ac-cdc5-4ad0-b9ec-bbd380355254-logs\") pod \"926092ac-cdc5-4ad0-b9ec-bbd380355254\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " Nov 30 08:09:57 crc kubenswrapper[4941]: I1130 08:09:57.980187 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-combined-ca-bundle\") pod \"926092ac-cdc5-4ad0-b9ec-bbd380355254\" (UID: \"926092ac-cdc5-4ad0-b9ec-bbd380355254\") " Nov 30 08:09:57 crc kubenswrapper[4941]: I1130 08:09:57.981191 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/926092ac-cdc5-4ad0-b9ec-bbd380355254-logs" (OuterVolumeSpecName: "logs") pod "926092ac-cdc5-4ad0-b9ec-bbd380355254" (UID: "926092ac-cdc5-4ad0-b9ec-bbd380355254"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.083213 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/926092ac-cdc5-4ad0-b9ec-bbd380355254-logs\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.542500 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-ct89q" event={"ID":"926092ac-cdc5-4ad0-b9ec-bbd380355254","Type":"ContainerDied","Data":"454f7b96a4c7dcc945e08c2801b054dd457573d0fed205fa2997e297ef161a46"} Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.542545 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="454f7b96a4c7dcc945e08c2801b054dd457573d0fed205fa2997e297ef161a46" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.542611 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-ct89q" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.590641 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-scripts" (OuterVolumeSpecName: "scripts") pod "926092ac-cdc5-4ad0-b9ec-bbd380355254" (UID: "926092ac-cdc5-4ad0-b9ec-bbd380355254"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.590677 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/926092ac-cdc5-4ad0-b9ec-bbd380355254-kube-api-access-zg6kh" (OuterVolumeSpecName: "kube-api-access-zg6kh") pod "926092ac-cdc5-4ad0-b9ec-bbd380355254" (UID: "926092ac-cdc5-4ad0-b9ec-bbd380355254"). InnerVolumeSpecName "kube-api-access-zg6kh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.595118 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.595151 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zg6kh\" (UniqueName: \"kubernetes.io/projected/926092ac-cdc5-4ad0-b9ec-bbd380355254-kube-api-access-zg6kh\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.619522 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-config-data" (OuterVolumeSpecName: "config-data") pod "926092ac-cdc5-4ad0-b9ec-bbd380355254" (UID: "926092ac-cdc5-4ad0-b9ec-bbd380355254"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.625526 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "926092ac-cdc5-4ad0-b9ec-bbd380355254" (UID: "926092ac-cdc5-4ad0-b9ec-bbd380355254"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.631061 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-95bccfd8-jcbmg"] Nov 30 08:09:58 crc kubenswrapper[4941]: E1130 08:09:58.631736 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="926092ac-cdc5-4ad0-b9ec-bbd380355254" containerName="placement-db-sync" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.631844 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="926092ac-cdc5-4ad0-b9ec-bbd380355254" containerName="placement-db-sync" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.632217 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="926092ac-cdc5-4ad0-b9ec-bbd380355254" containerName="placement-db-sync" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.637580 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.648665 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-95bccfd8-jcbmg"] Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.696518 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64e9210e-a153-46f5-91d1-227beb120823-scripts\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.696690 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g65tp\" (UniqueName: \"kubernetes.io/projected/64e9210e-a153-46f5-91d1-227beb120823-kube-api-access-g65tp\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.696801 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e9210e-a153-46f5-91d1-227beb120823-combined-ca-bundle\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.696957 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e9210e-a153-46f5-91d1-227beb120823-config-data\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.697070 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e9210e-a153-46f5-91d1-227beb120823-logs\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.697196 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.697351 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/926092ac-cdc5-4ad0-b9ec-bbd380355254-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.798664 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64e9210e-a153-46f5-91d1-227beb120823-scripts\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.798995 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g65tp\" (UniqueName: \"kubernetes.io/projected/64e9210e-a153-46f5-91d1-227beb120823-kube-api-access-g65tp\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.799111 
4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e9210e-a153-46f5-91d1-227beb120823-combined-ca-bundle\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.799243 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e9210e-a153-46f5-91d1-227beb120823-config-data\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.799358 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e9210e-a153-46f5-91d1-227beb120823-logs\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.799860 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e9210e-a153-46f5-91d1-227beb120823-logs\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.803585 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64e9210e-a153-46f5-91d1-227beb120823-scripts\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.804210 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e9210e-a153-46f5-91d1-227beb120823-config-data\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.804909 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e9210e-a153-46f5-91d1-227beb120823-combined-ca-bundle\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:58 crc kubenswrapper[4941]: I1130 08:09:58.821224 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g65tp\" (UniqueName: \"kubernetes.io/projected/64e9210e-a153-46f5-91d1-227beb120823-kube-api-access-g65tp\") pod \"placement-95bccfd8-jcbmg\" (UID: \"64e9210e-a153-46f5-91d1-227beb120823\") " pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:59 crc kubenswrapper[4941]: I1130 08:09:59.001251 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:09:59 crc kubenswrapper[4941]: I1130 08:09:59.503300 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-95bccfd8-jcbmg"] Nov 30 08:09:59 crc kubenswrapper[4941]: I1130 08:09:59.565570 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-95bccfd8-jcbmg" event={"ID":"64e9210e-a153-46f5-91d1-227beb120823","Type":"ContainerStarted","Data":"e46fe739ff559a6cbb345e7525a9efaf297a39ce596f38a93d99ef9667b591c2"} Nov 30 08:10:00 crc kubenswrapper[4941]: I1130 08:10:00.425532 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:10:00 crc kubenswrapper[4941]: I1130 08:10:00.536002 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76cdfb6c59-wzd9h"] Nov 30 08:10:00 crc kubenswrapper[4941]: I1130 08:10:00.536348 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" podUID="389464a4-94f6-4b32-ad06-1d05ec3c7ef3" containerName="dnsmasq-dns" containerID="cri-o://8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997" gracePeriod=10 Nov 30 08:10:00 crc kubenswrapper[4941]: I1130 08:10:00.593551 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-95bccfd8-jcbmg" event={"ID":"64e9210e-a153-46f5-91d1-227beb120823","Type":"ContainerStarted","Data":"7c96c8610db757cf376d2bf4737de60e915ec899e42e896fac52a29c87d0b986"} Nov 30 08:10:00 crc kubenswrapper[4941]: I1130 08:10:00.593602 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-95bccfd8-jcbmg" event={"ID":"64e9210e-a153-46f5-91d1-227beb120823","Type":"ContainerStarted","Data":"9029230053bd736c937538289de229059010b75975e71a2ab940dcb97c2005be"} Nov 30 08:10:00 crc kubenswrapper[4941]: I1130 08:10:00.594974 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:10:00 crc kubenswrapper[4941]: I1130 08:10:00.595029 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-95bccfd8-jcbmg" Nov 30 08:10:00 crc kubenswrapper[4941]: I1130 08:10:00.621778 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-95bccfd8-jcbmg" podStartSLOduration=2.621761375 podStartE2EDuration="2.621761375s" podCreationTimestamp="2025-11-30 08:09:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:10:00.616281595 +0000 UTC m=+5021.384453214" watchObservedRunningTime="2025-11-30 08:10:00.621761375 +0000 UTC m=+5021.389932974" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.285807 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.407156 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-dns-svc\") pod \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.407247 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-config\") pod \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.407356 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-ovsdbserver-nb\") pod \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.407378 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-ovsdbserver-sb\") pod \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.407407 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxhnr\" (UniqueName: \"kubernetes.io/projected/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-kube-api-access-zxhnr\") pod \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\" (UID: \"389464a4-94f6-4b32-ad06-1d05ec3c7ef3\") " Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.421548 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-kube-api-access-zxhnr" (OuterVolumeSpecName: "kube-api-access-zxhnr") pod "389464a4-94f6-4b32-ad06-1d05ec3c7ef3" (UID: "389464a4-94f6-4b32-ad06-1d05ec3c7ef3"). InnerVolumeSpecName "kube-api-access-zxhnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.452706 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "389464a4-94f6-4b32-ad06-1d05ec3c7ef3" (UID: "389464a4-94f6-4b32-ad06-1d05ec3c7ef3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.456640 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-config" (OuterVolumeSpecName: "config") pod "389464a4-94f6-4b32-ad06-1d05ec3c7ef3" (UID: "389464a4-94f6-4b32-ad06-1d05ec3c7ef3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.458680 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "389464a4-94f6-4b32-ad06-1d05ec3c7ef3" (UID: "389464a4-94f6-4b32-ad06-1d05ec3c7ef3"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.468220 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "389464a4-94f6-4b32-ad06-1d05ec3c7ef3" (UID: "389464a4-94f6-4b32-ad06-1d05ec3c7ef3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.509752 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxhnr\" (UniqueName: \"kubernetes.io/projected/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-kube-api-access-zxhnr\") on node \"crc\" DevicePath \"\"" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.509801 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.509811 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.509822 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.509835 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/389464a4-94f6-4b32-ad06-1d05ec3c7ef3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.607021 4941 generic.go:334] "Generic (PLEG): container finished" podID="389464a4-94f6-4b32-ad06-1d05ec3c7ef3" containerID="8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997" exitCode=0 Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.607171 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" event={"ID":"389464a4-94f6-4b32-ad06-1d05ec3c7ef3","Type":"ContainerDied","Data":"8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997"} Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.607293 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" event={"ID":"389464a4-94f6-4b32-ad06-1d05ec3c7ef3","Type":"ContainerDied","Data":"b88a0894054f9dbe033c8e147662d5156c3b72775c7be8354dab424a01333b86"} Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.607373 4941 scope.go:117] "RemoveContainer" containerID="8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.607895 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76cdfb6c59-wzd9h" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.638405 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76cdfb6c59-wzd9h"] Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.644253 4941 scope.go:117] "RemoveContainer" containerID="1d646c4291443dae9568d133273f1f49088762720d7fc75a58293e1298a59d03" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.646149 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76cdfb6c59-wzd9h"] Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.677411 4941 scope.go:117] "RemoveContainer" containerID="8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997" Nov 30 08:10:01 crc kubenswrapper[4941]: E1130 08:10:01.677998 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997\": container with ID starting with 8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997 not found: ID does not exist" containerID="8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.678041 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997"} err="failed to get container status \"8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997\": rpc error: code = NotFound desc = could not find container \"8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997\": container with ID starting with 8991b85fc024a249d968d33cbe4213cfa6a9787c3e43c38e3c9fca921e850997 not found: ID does not exist" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.678081 4941 scope.go:117] "RemoveContainer" containerID="1d646c4291443dae9568d133273f1f49088762720d7fc75a58293e1298a59d03" Nov 30 08:10:01 crc kubenswrapper[4941]: E1130 08:10:01.679170 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d646c4291443dae9568d133273f1f49088762720d7fc75a58293e1298a59d03\": container with ID starting with 1d646c4291443dae9568d133273f1f49088762720d7fc75a58293e1298a59d03 not found: ID does not exist" containerID="1d646c4291443dae9568d133273f1f49088762720d7fc75a58293e1298a59d03" Nov 30 08:10:01 crc kubenswrapper[4941]: I1130 08:10:01.679204 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d646c4291443dae9568d133273f1f49088762720d7fc75a58293e1298a59d03"} err="failed to get container status \"1d646c4291443dae9568d133273f1f49088762720d7fc75a58293e1298a59d03\": rpc error: code = NotFound desc = could not find container \"1d646c4291443dae9568d133273f1f49088762720d7fc75a58293e1298a59d03\": container with ID starting with 1d646c4291443dae9568d133273f1f49088762720d7fc75a58293e1298a59d03 not found: ID does not exist" Nov 30 08:10:02 crc kubenswrapper[4941]: I1130 08:10:02.978627 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:10:02 crc kubenswrapper[4941]: I1130 08:10:02.979081 4941 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:10:03 crc kubenswrapper[4941]: I1130 08:10:03.543587 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="389464a4-94f6-4b32-ad06-1d05ec3c7ef3" path="/var/lib/kubelet/pods/389464a4-94f6-4b32-ad06-1d05ec3c7ef3/volumes" Nov 30 08:10:11 crc kubenswrapper[4941]: I1130 08:10:11.762490 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lzzkz"] Nov 30 08:10:11 crc kubenswrapper[4941]: E1130 08:10:11.763993 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="389464a4-94f6-4b32-ad06-1d05ec3c7ef3" containerName="init" Nov 30 08:10:11 crc kubenswrapper[4941]: I1130 08:10:11.764019 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="389464a4-94f6-4b32-ad06-1d05ec3c7ef3" containerName="init" Nov 30 08:10:11 crc kubenswrapper[4941]: E1130 08:10:11.764063 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="389464a4-94f6-4b32-ad06-1d05ec3c7ef3" containerName="dnsmasq-dns" Nov 30 08:10:11 crc kubenswrapper[4941]: I1130 08:10:11.764075 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="389464a4-94f6-4b32-ad06-1d05ec3c7ef3" containerName="dnsmasq-dns" Nov 30 08:10:11 crc kubenswrapper[4941]: I1130 08:10:11.764403 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="389464a4-94f6-4b32-ad06-1d05ec3c7ef3" containerName="dnsmasq-dns" Nov 30 08:10:11 crc kubenswrapper[4941]: I1130 08:10:11.766313 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:11 crc kubenswrapper[4941]: I1130 08:10:11.787251 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lzzkz"] Nov 30 08:10:11 crc kubenswrapper[4941]: I1130 08:10:11.936943 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbee6666-0b15-4a02-a39c-482ca66c3341-catalog-content\") pod \"redhat-operators-lzzkz\" (UID: \"dbee6666-0b15-4a02-a39c-482ca66c3341\") " pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:11 crc kubenswrapper[4941]: I1130 08:10:11.937032 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcqf7\" (UniqueName: \"kubernetes.io/projected/dbee6666-0b15-4a02-a39c-482ca66c3341-kube-api-access-pcqf7\") pod \"redhat-operators-lzzkz\" (UID: \"dbee6666-0b15-4a02-a39c-482ca66c3341\") " pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:11 crc kubenswrapper[4941]: I1130 08:10:11.937123 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbee6666-0b15-4a02-a39c-482ca66c3341-utilities\") pod \"redhat-operators-lzzkz\" (UID: \"dbee6666-0b15-4a02-a39c-482ca66c3341\") " pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:12 crc kubenswrapper[4941]: I1130 08:10:12.039883 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbee6666-0b15-4a02-a39c-482ca66c3341-catalog-content\") pod \"redhat-operators-lzzkz\" (UID: 
\"dbee6666-0b15-4a02-a39c-482ca66c3341\") " pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:12 crc kubenswrapper[4941]: I1130 08:10:12.039961 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcqf7\" (UniqueName: \"kubernetes.io/projected/dbee6666-0b15-4a02-a39c-482ca66c3341-kube-api-access-pcqf7\") pod \"redhat-operators-lzzkz\" (UID: \"dbee6666-0b15-4a02-a39c-482ca66c3341\") " pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:12 crc kubenswrapper[4941]: I1130 08:10:12.040038 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbee6666-0b15-4a02-a39c-482ca66c3341-utilities\") pod \"redhat-operators-lzzkz\" (UID: \"dbee6666-0b15-4a02-a39c-482ca66c3341\") " pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:12 crc kubenswrapper[4941]: I1130 08:10:12.040885 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbee6666-0b15-4a02-a39c-482ca66c3341-catalog-content\") pod \"redhat-operators-lzzkz\" (UID: \"dbee6666-0b15-4a02-a39c-482ca66c3341\") " pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:12 crc kubenswrapper[4941]: I1130 08:10:12.040890 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbee6666-0b15-4a02-a39c-482ca66c3341-utilities\") pod \"redhat-operators-lzzkz\" (UID: \"dbee6666-0b15-4a02-a39c-482ca66c3341\") " pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:12 crc kubenswrapper[4941]: I1130 08:10:12.075586 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcqf7\" (UniqueName: \"kubernetes.io/projected/dbee6666-0b15-4a02-a39c-482ca66c3341-kube-api-access-pcqf7\") pod \"redhat-operators-lzzkz\" (UID: \"dbee6666-0b15-4a02-a39c-482ca66c3341\") " pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:12 crc kubenswrapper[4941]: I1130 08:10:12.098222 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:13 crc kubenswrapper[4941]: I1130 08:10:13.519283 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lzzkz"] Nov 30 08:10:13 crc kubenswrapper[4941]: W1130 08:10:13.532253 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbee6666_0b15_4a02_a39c_482ca66c3341.slice/crio-2acdc88428badb8812d3572f38ab464bc7ea11cd2c95151d4cf7571a633ddafd WatchSource:0}: Error finding container 2acdc88428badb8812d3572f38ab464bc7ea11cd2c95151d4cf7571a633ddafd: Status 404 returned error can't find the container with id 2acdc88428badb8812d3572f38ab464bc7ea11cd2c95151d4cf7571a633ddafd Nov 30 08:10:13 crc kubenswrapper[4941]: I1130 08:10:13.746099 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzzkz" event={"ID":"dbee6666-0b15-4a02-a39c-482ca66c3341","Type":"ContainerStarted","Data":"2acdc88428badb8812d3572f38ab464bc7ea11cd2c95151d4cf7571a633ddafd"} Nov 30 08:10:14 crc kubenswrapper[4941]: I1130 08:10:14.762957 4941 generic.go:334] "Generic (PLEG): container finished" podID="dbee6666-0b15-4a02-a39c-482ca66c3341" containerID="0053fb3121ebd7a243c7e948f50ba0f529329dfaf88ee4610444330a9729fde2" exitCode=0 Nov 30 08:10:14 crc kubenswrapper[4941]: I1130 08:10:14.763118 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzzkz" event={"ID":"dbee6666-0b15-4a02-a39c-482ca66c3341","Type":"ContainerDied","Data":"0053fb3121ebd7a243c7e948f50ba0f529329dfaf88ee4610444330a9729fde2"} Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.528158 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pqpwf"] Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.533600 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.540646 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pqpwf"] Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.636636 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51b404a0-43d1-4e83-93cd-ddb9b509acac-catalog-content\") pod \"community-operators-pqpwf\" (UID: \"51b404a0-43d1-4e83-93cd-ddb9b509acac\") " pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.636693 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hknbf\" (UniqueName: \"kubernetes.io/projected/51b404a0-43d1-4e83-93cd-ddb9b509acac-kube-api-access-hknbf\") pod \"community-operators-pqpwf\" (UID: \"51b404a0-43d1-4e83-93cd-ddb9b509acac\") " pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.636802 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51b404a0-43d1-4e83-93cd-ddb9b509acac-utilities\") pod \"community-operators-pqpwf\" (UID: \"51b404a0-43d1-4e83-93cd-ddb9b509acac\") " pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.738292 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51b404a0-43d1-4e83-93cd-ddb9b509acac-utilities\") pod \"community-operators-pqpwf\" (UID: \"51b404a0-43d1-4e83-93cd-ddb9b509acac\") " pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.738487 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51b404a0-43d1-4e83-93cd-ddb9b509acac-catalog-content\") pod \"community-operators-pqpwf\" (UID: \"51b404a0-43d1-4e83-93cd-ddb9b509acac\") " pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.738523 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hknbf\" (UniqueName: \"kubernetes.io/projected/51b404a0-43d1-4e83-93cd-ddb9b509acac-kube-api-access-hknbf\") pod \"community-operators-pqpwf\" (UID: \"51b404a0-43d1-4e83-93cd-ddb9b509acac\") " pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.738995 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51b404a0-43d1-4e83-93cd-ddb9b509acac-utilities\") pod \"community-operators-pqpwf\" (UID: \"51b404a0-43d1-4e83-93cd-ddb9b509acac\") " pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.739177 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51b404a0-43d1-4e83-93cd-ddb9b509acac-catalog-content\") pod \"community-operators-pqpwf\" (UID: \"51b404a0-43d1-4e83-93cd-ddb9b509acac\") " pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.765222 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hknbf\" (UniqueName: \"kubernetes.io/projected/51b404a0-43d1-4e83-93cd-ddb9b509acac-kube-api-access-hknbf\") pod \"community-operators-pqpwf\" (UID: \"51b404a0-43d1-4e83-93cd-ddb9b509acac\") " pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.785502 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzzkz" event={"ID":"dbee6666-0b15-4a02-a39c-482ca66c3341","Type":"ContainerStarted","Data":"f64432f3b6da93f2d6774d11cbacce7efb888e7d9cc8ebf5a35ff50811f3e249"} Nov 30 08:10:16 crc kubenswrapper[4941]: I1130 08:10:16.872922 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:17 crc kubenswrapper[4941]: I1130 08:10:17.453908 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pqpwf"] Nov 30 08:10:17 crc kubenswrapper[4941]: I1130 08:10:17.798664 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pqpwf" event={"ID":"51b404a0-43d1-4e83-93cd-ddb9b509acac","Type":"ContainerStarted","Data":"6ae4f35855b9d906974d8798bd21c7bec5e8b61ce12e77a4e53e12f65e065fa5"} Nov 30 08:10:17 crc kubenswrapper[4941]: I1130 08:10:17.804209 4941 generic.go:334] "Generic (PLEG): container finished" podID="dbee6666-0b15-4a02-a39c-482ca66c3341" containerID="f64432f3b6da93f2d6774d11cbacce7efb888e7d9cc8ebf5a35ff50811f3e249" exitCode=0 Nov 30 08:10:17 crc kubenswrapper[4941]: I1130 08:10:17.804295 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzzkz" event={"ID":"dbee6666-0b15-4a02-a39c-482ca66c3341","Type":"ContainerDied","Data":"f64432f3b6da93f2d6774d11cbacce7efb888e7d9cc8ebf5a35ff50811f3e249"} Nov 30 08:10:18 crc kubenswrapper[4941]: I1130 08:10:18.821476 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzzkz" event={"ID":"dbee6666-0b15-4a02-a39c-482ca66c3341","Type":"ContainerStarted","Data":"2cfe5c325890f906a6ca6de3d6081958935e66096a21872a47ee62705bfc7a31"} Nov 30 08:10:18 crc kubenswrapper[4941]: I1130 08:10:18.826458 4941 generic.go:334] "Generic (PLEG): container finished" podID="51b404a0-43d1-4e83-93cd-ddb9b509acac" containerID="7ed04f34d3b380e9f41ded58b99a148c624ad73f81f758b7d12c38e3671cd45b" exitCode=0 Nov 30 08:10:18 crc kubenswrapper[4941]: I1130 08:10:18.826540 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pqpwf" event={"ID":"51b404a0-43d1-4e83-93cd-ddb9b509acac","Type":"ContainerDied","Data":"7ed04f34d3b380e9f41ded58b99a148c624ad73f81f758b7d12c38e3671cd45b"} Nov 30 08:10:18 crc kubenswrapper[4941]: I1130 08:10:18.866689 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lzzkz" podStartSLOduration=4.157839531 podStartE2EDuration="7.866650121s" podCreationTimestamp="2025-11-30 08:10:11 +0000 UTC" firstStartedPulling="2025-11-30 08:10:14.766119736 +0000 UTC m=+5035.534291385" lastFinishedPulling="2025-11-30 08:10:18.474930336 +0000 UTC m=+5039.243101975" observedRunningTime="2025-11-30 08:10:18.844958501 +0000 UTC m=+5039.613130230" watchObservedRunningTime="2025-11-30 08:10:18.866650121 +0000 UTC m=+5039.634821740" Nov 30 08:10:18 crc kubenswrapper[4941]: I1130 08:10:18.942687 4941 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-hrqqq"] Nov 30 08:10:18 crc kubenswrapper[4941]: I1130 08:10:18.948160 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:18 crc kubenswrapper[4941]: I1130 08:10:18.958606 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hrqqq"] Nov 30 08:10:19 crc kubenswrapper[4941]: I1130 08:10:19.097164 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77f49af3-564f-4cc7-9a85-9761952f2055-catalog-content\") pod \"certified-operators-hrqqq\" (UID: \"77f49af3-564f-4cc7-9a85-9761952f2055\") " pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:19 crc kubenswrapper[4941]: I1130 08:10:19.097376 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77f49af3-564f-4cc7-9a85-9761952f2055-utilities\") pod \"certified-operators-hrqqq\" (UID: \"77f49af3-564f-4cc7-9a85-9761952f2055\") " pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:19 crc kubenswrapper[4941]: I1130 08:10:19.097716 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5rls\" (UniqueName: \"kubernetes.io/projected/77f49af3-564f-4cc7-9a85-9761952f2055-kube-api-access-r5rls\") pod \"certified-operators-hrqqq\" (UID: \"77f49af3-564f-4cc7-9a85-9761952f2055\") " pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:19 crc kubenswrapper[4941]: I1130 08:10:19.200218 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77f49af3-564f-4cc7-9a85-9761952f2055-utilities\") pod \"certified-operators-hrqqq\" (UID: \"77f49af3-564f-4cc7-9a85-9761952f2055\") " pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:19 crc kubenswrapper[4941]: I1130 08:10:19.200381 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5rls\" (UniqueName: \"kubernetes.io/projected/77f49af3-564f-4cc7-9a85-9761952f2055-kube-api-access-r5rls\") pod \"certified-operators-hrqqq\" (UID: \"77f49af3-564f-4cc7-9a85-9761952f2055\") " pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:19 crc kubenswrapper[4941]: I1130 08:10:19.200428 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77f49af3-564f-4cc7-9a85-9761952f2055-catalog-content\") pod \"certified-operators-hrqqq\" (UID: \"77f49af3-564f-4cc7-9a85-9761952f2055\") " pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:19 crc kubenswrapper[4941]: I1130 08:10:19.201041 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77f49af3-564f-4cc7-9a85-9761952f2055-utilities\") pod \"certified-operators-hrqqq\" (UID: \"77f49af3-564f-4cc7-9a85-9761952f2055\") " pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:19 crc kubenswrapper[4941]: I1130 08:10:19.201093 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77f49af3-564f-4cc7-9a85-9761952f2055-catalog-content\") pod \"certified-operators-hrqqq\" (UID: 
\"77f49af3-564f-4cc7-9a85-9761952f2055\") " pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:19 crc kubenswrapper[4941]: I1130 08:10:19.222807 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5rls\" (UniqueName: \"kubernetes.io/projected/77f49af3-564f-4cc7-9a85-9761952f2055-kube-api-access-r5rls\") pod \"certified-operators-hrqqq\" (UID: \"77f49af3-564f-4cc7-9a85-9761952f2055\") " pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:19 crc kubenswrapper[4941]: I1130 08:10:19.276583 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:19 crc kubenswrapper[4941]: I1130 08:10:19.780581 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hrqqq"] Nov 30 08:10:19 crc kubenswrapper[4941]: W1130 08:10:19.795448 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77f49af3_564f_4cc7_9a85_9761952f2055.slice/crio-26e018d900a5ed8f95e1f87b3ebba9f425f36e821c40dcd6fd8c506eff2bb326 WatchSource:0}: Error finding container 26e018d900a5ed8f95e1f87b3ebba9f425f36e821c40dcd6fd8c506eff2bb326: Status 404 returned error can't find the container with id 26e018d900a5ed8f95e1f87b3ebba9f425f36e821c40dcd6fd8c506eff2bb326 Nov 30 08:10:19 crc kubenswrapper[4941]: I1130 08:10:19.840805 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hrqqq" event={"ID":"77f49af3-564f-4cc7-9a85-9761952f2055","Type":"ContainerStarted","Data":"26e018d900a5ed8f95e1f87b3ebba9f425f36e821c40dcd6fd8c506eff2bb326"} Nov 30 08:10:20 crc kubenswrapper[4941]: I1130 08:10:20.851879 4941 generic.go:334] "Generic (PLEG): container finished" podID="51b404a0-43d1-4e83-93cd-ddb9b509acac" containerID="9733ce494c65d19959b52274bc91776228db8b26b19a841bd6cabeca8a2dbf21" exitCode=0 Nov 30 08:10:20 crc kubenswrapper[4941]: I1130 08:10:20.851945 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pqpwf" event={"ID":"51b404a0-43d1-4e83-93cd-ddb9b509acac","Type":"ContainerDied","Data":"9733ce494c65d19959b52274bc91776228db8b26b19a841bd6cabeca8a2dbf21"} Nov 30 08:10:20 crc kubenswrapper[4941]: I1130 08:10:20.857018 4941 generic.go:334] "Generic (PLEG): container finished" podID="77f49af3-564f-4cc7-9a85-9761952f2055" containerID="fb4a56d6a86fb96e9bbf5de3da6c8b681e4839de20f33ffa42620bbd9a449c33" exitCode=0 Nov 30 08:10:20 crc kubenswrapper[4941]: I1130 08:10:20.857080 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hrqqq" event={"ID":"77f49af3-564f-4cc7-9a85-9761952f2055","Type":"ContainerDied","Data":"fb4a56d6a86fb96e9bbf5de3da6c8b681e4839de20f33ffa42620bbd9a449c33"} Nov 30 08:10:21 crc kubenswrapper[4941]: I1130 08:10:21.871406 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pqpwf" event={"ID":"51b404a0-43d1-4e83-93cd-ddb9b509acac","Type":"ContainerStarted","Data":"bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd"} Nov 30 08:10:21 crc kubenswrapper[4941]: I1130 08:10:21.874434 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hrqqq" event={"ID":"77f49af3-564f-4cc7-9a85-9761952f2055","Type":"ContainerStarted","Data":"a2e515627a15e74626435c12b56dae5fda2a01e1a5f45ab0c5a40b913c8bac8a"} Nov 30 08:10:21 crc 
kubenswrapper[4941]: I1130 08:10:21.903657 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pqpwf" podStartSLOduration=3.444347554 podStartE2EDuration="5.903634081s" podCreationTimestamp="2025-11-30 08:10:16 +0000 UTC" firstStartedPulling="2025-11-30 08:10:18.828929545 +0000 UTC m=+5039.597101174" lastFinishedPulling="2025-11-30 08:10:21.288216092 +0000 UTC m=+5042.056387701" observedRunningTime="2025-11-30 08:10:21.903483976 +0000 UTC m=+5042.671655585" watchObservedRunningTime="2025-11-30 08:10:21.903634081 +0000 UTC m=+5042.671805690" Nov 30 08:10:22 crc kubenswrapper[4941]: I1130 08:10:22.098612 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:22 crc kubenswrapper[4941]: I1130 08:10:22.098995 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:22 crc kubenswrapper[4941]: I1130 08:10:22.888938 4941 generic.go:334] "Generic (PLEG): container finished" podID="77f49af3-564f-4cc7-9a85-9761952f2055" containerID="a2e515627a15e74626435c12b56dae5fda2a01e1a5f45ab0c5a40b913c8bac8a" exitCode=0 Nov 30 08:10:22 crc kubenswrapper[4941]: I1130 08:10:22.889137 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hrqqq" event={"ID":"77f49af3-564f-4cc7-9a85-9761952f2055","Type":"ContainerDied","Data":"a2e515627a15e74626435c12b56dae5fda2a01e1a5f45ab0c5a40b913c8bac8a"} Nov 30 08:10:23 crc kubenswrapper[4941]: I1130 08:10:23.160638 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lzzkz" podUID="dbee6666-0b15-4a02-a39c-482ca66c3341" containerName="registry-server" probeResult="failure" output=< Nov 30 08:10:23 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s Nov 30 08:10:23 crc kubenswrapper[4941]: > Nov 30 08:10:23 crc kubenswrapper[4941]: I1130 08:10:23.902739 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hrqqq" event={"ID":"77f49af3-564f-4cc7-9a85-9761952f2055","Type":"ContainerStarted","Data":"9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859"} Nov 30 08:10:23 crc kubenswrapper[4941]: I1130 08:10:23.927080 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hrqqq" podStartSLOduration=3.226988261 podStartE2EDuration="5.927057698s" podCreationTimestamp="2025-11-30 08:10:18 +0000 UTC" firstStartedPulling="2025-11-30 08:10:20.859094202 +0000 UTC m=+5041.627265811" lastFinishedPulling="2025-11-30 08:10:23.559163639 +0000 UTC m=+5044.327335248" observedRunningTime="2025-11-30 08:10:23.923615312 +0000 UTC m=+5044.691786921" watchObservedRunningTime="2025-11-30 08:10:23.927057698 +0000 UTC m=+5044.695229307" Nov 30 08:10:26 crc kubenswrapper[4941]: I1130 08:10:26.873881 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:26 crc kubenswrapper[4941]: I1130 08:10:26.874569 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:27 crc kubenswrapper[4941]: I1130 08:10:27.547959 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pqpwf" Nov 30 08:10:27 crc kubenswrapper[4941]: 
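The startup-probe failure above, "timeout: failed to connect service \":50051\" within 1s", is the output of a gRPC health check against the registry-server's port 50051. A minimal Go sketch of such a check, assuming a standard grpc_health_v1 endpoint; the address and the one-second budget come from the log line, everything else is illustrative and not the kubelet's own prober:

```go
package main

import (
	"context"
	"fmt"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	healthpb "google.golang.org/grpc/health/grpc_health_v1"
)

func main() {
	// One-second budget, matching "within 1s" in the probe output above.
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	// Port 50051 is the registry-server gRPC port named in the failure output.
	conn, err := grpc.DialContext(ctx, "localhost:50051",
		grpc.WithTransportCredentials(insecure.NewCredentials()),
		grpc.WithBlock()) // block so a dead endpoint fails within the budget
	if err != nil {
		fmt.Printf("timeout: failed to connect service %q within 1s\n", ":50051")
		return
	}
	defer conn.Close()

	// An empty service name asks about the server's overall health.
	resp, err := healthpb.NewHealthClient(conn).Check(ctx, &healthpb.HealthCheckRequest{})
	if err != nil {
		fmt.Println("health RPC failed:", err)
		return
	}
	fmt.Println("status:", resp.GetStatus()) // SERVING once the catalog index is loaded
}
```

That also explains the unhealthy-then-started probe sequences around it: the check fails while the catalog container is still loading its index, then succeeds once the gRPC server answers within the budget.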
Nov 30 08:10:29 crc kubenswrapper[4941]: I1130 08:10:29.277313 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hrqqq"
Nov 30 08:10:29 crc kubenswrapper[4941]: I1130 08:10:29.277714 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hrqqq"
Nov 30 08:10:29 crc kubenswrapper[4941]: I1130 08:10:29.333708 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hrqqq"
Nov 30 08:10:30 crc kubenswrapper[4941]: I1130 08:10:30.036287 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hrqqq"
Nov 30 08:10:30 crc kubenswrapper[4941]: I1130 08:10:30.127053 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-95bccfd8-jcbmg"
Nov 30 08:10:30 crc kubenswrapper[4941]: I1130 08:10:30.131501 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-95bccfd8-jcbmg"
Nov 30 08:10:30 crc kubenswrapper[4941]: I1130 08:10:30.319541 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pqpwf"]
Nov 30 08:10:30 crc kubenswrapper[4941]: I1130 08:10:30.319826 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pqpwf" podUID="51b404a0-43d1-4e83-93cd-ddb9b509acac" containerName="registry-server" containerID="cri-o://bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd" gracePeriod=2
Nov 30 08:10:30 crc kubenswrapper[4941]: I1130 08:10:30.882247 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pqpwf"
Nov 30 08:10:30 crc kubenswrapper[4941]: I1130 08:10:30.983069 4941 generic.go:334] "Generic (PLEG): container finished" podID="51b404a0-43d1-4e83-93cd-ddb9b509acac" containerID="bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd" exitCode=0
Nov 30 08:10:30 crc kubenswrapper[4941]: I1130 08:10:30.984395 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pqpwf"
Nov 30 08:10:30 crc kubenswrapper[4941]: I1130 08:10:30.986283 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pqpwf" event={"ID":"51b404a0-43d1-4e83-93cd-ddb9b509acac","Type":"ContainerDied","Data":"bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd"}
Nov 30 08:10:30 crc kubenswrapper[4941]: I1130 08:10:30.986347 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pqpwf" event={"ID":"51b404a0-43d1-4e83-93cd-ddb9b509acac","Type":"ContainerDied","Data":"6ae4f35855b9d906974d8798bd21c7bec5e8b61ce12e77a4e53e12f65e065fa5"}
Nov 30 08:10:30 crc kubenswrapper[4941]: I1130 08:10:30.986369 4941 scope.go:117] "RemoveContainer" containerID="bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd"
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.014751 4941 scope.go:117] "RemoveContainer" containerID="9733ce494c65d19959b52274bc91776228db8b26b19a841bd6cabeca8a2dbf21"
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.054843 4941 scope.go:117] "RemoveContainer" containerID="7ed04f34d3b380e9f41ded58b99a148c624ad73f81f758b7d12c38e3671cd45b"
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.067793 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51b404a0-43d1-4e83-93cd-ddb9b509acac-catalog-content\") pod \"51b404a0-43d1-4e83-93cd-ddb9b509acac\" (UID: \"51b404a0-43d1-4e83-93cd-ddb9b509acac\") "
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.067934 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51b404a0-43d1-4e83-93cd-ddb9b509acac-utilities\") pod \"51b404a0-43d1-4e83-93cd-ddb9b509acac\" (UID: \"51b404a0-43d1-4e83-93cd-ddb9b509acac\") "
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.067975 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hknbf\" (UniqueName: \"kubernetes.io/projected/51b404a0-43d1-4e83-93cd-ddb9b509acac-kube-api-access-hknbf\") pod \"51b404a0-43d1-4e83-93cd-ddb9b509acac\" (UID: \"51b404a0-43d1-4e83-93cd-ddb9b509acac\") "
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.068877 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51b404a0-43d1-4e83-93cd-ddb9b509acac-utilities" (OuterVolumeSpecName: "utilities") pod "51b404a0-43d1-4e83-93cd-ddb9b509acac" (UID: "51b404a0-43d1-4e83-93cd-ddb9b509acac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.080081 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51b404a0-43d1-4e83-93cd-ddb9b509acac-kube-api-access-hknbf" (OuterVolumeSpecName: "kube-api-access-hknbf") pod "51b404a0-43d1-4e83-93cd-ddb9b509acac" (UID: "51b404a0-43d1-4e83-93cd-ddb9b509acac"). InnerVolumeSpecName "kube-api-access-hknbf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.103116 4941 scope.go:117] "RemoveContainer" containerID="bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd"
Nov 30 08:10:31 crc kubenswrapper[4941]: E1130 08:10:31.103827 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd\": container with ID starting with bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd not found: ID does not exist" containerID="bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd"
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.103890 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd"} err="failed to get container status \"bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd\": rpc error: code = NotFound desc = could not find container \"bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd\": container with ID starting with bfe9b8533a29b326170a2cd3dacb0d381ae894fcf818cf7dcd1d76c4b301e0bd not found: ID does not exist"
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.103929 4941 scope.go:117] "RemoveContainer" containerID="9733ce494c65d19959b52274bc91776228db8b26b19a841bd6cabeca8a2dbf21"
Nov 30 08:10:31 crc kubenswrapper[4941]: E1130 08:10:31.104315 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9733ce494c65d19959b52274bc91776228db8b26b19a841bd6cabeca8a2dbf21\": container with ID starting with 9733ce494c65d19959b52274bc91776228db8b26b19a841bd6cabeca8a2dbf21 not found: ID does not exist" containerID="9733ce494c65d19959b52274bc91776228db8b26b19a841bd6cabeca8a2dbf21"
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.104364 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9733ce494c65d19959b52274bc91776228db8b26b19a841bd6cabeca8a2dbf21"} err="failed to get container status \"9733ce494c65d19959b52274bc91776228db8b26b19a841bd6cabeca8a2dbf21\": rpc error: code = NotFound desc = could not find container \"9733ce494c65d19959b52274bc91776228db8b26b19a841bd6cabeca8a2dbf21\": container with ID starting with 9733ce494c65d19959b52274bc91776228db8b26b19a841bd6cabeca8a2dbf21 not found: ID does not exist"
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.104381 4941 scope.go:117] "RemoveContainer" containerID="7ed04f34d3b380e9f41ded58b99a148c624ad73f81f758b7d12c38e3671cd45b"
Nov 30 08:10:31 crc kubenswrapper[4941]: E1130 08:10:31.105308 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ed04f34d3b380e9f41ded58b99a148c624ad73f81f758b7d12c38e3671cd45b\": container with ID starting with 7ed04f34d3b380e9f41ded58b99a148c624ad73f81f758b7d12c38e3671cd45b not found: ID does not exist" containerID="7ed04f34d3b380e9f41ded58b99a148c624ad73f81f758b7d12c38e3671cd45b"
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.105359 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ed04f34d3b380e9f41ded58b99a148c624ad73f81f758b7d12c38e3671cd45b"} err="failed to get container status \"7ed04f34d3b380e9f41ded58b99a148c624ad73f81f758b7d12c38e3671cd45b\": rpc error: code = NotFound desc = could not find container \"7ed04f34d3b380e9f41ded58b99a148c624ad73f81f758b7d12c38e3671cd45b\": container with ID starting with 7ed04f34d3b380e9f41ded58b99a148c624ad73f81f758b7d12c38e3671cd45b not found: ID does not exist"
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.131590 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51b404a0-43d1-4e83-93cd-ddb9b509acac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "51b404a0-43d1-4e83-93cd-ddb9b509acac" (UID: "51b404a0-43d1-4e83-93cd-ddb9b509acac"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.170612 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51b404a0-43d1-4e83-93cd-ddb9b509acac-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.170649 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51b404a0-43d1-4e83-93cd-ddb9b509acac-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.170660 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hknbf\" (UniqueName: \"kubernetes.io/projected/51b404a0-43d1-4e83-93cd-ddb9b509acac-kube-api-access-hknbf\") on node \"crc\" DevicePath \"\""
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.317896 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hrqqq"]
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.340456 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pqpwf"]
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.347406 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pqpwf"]
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.535319 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51b404a0-43d1-4e83-93cd-ddb9b509acac" path="/var/lib/kubelet/pods/51b404a0-43d1-4e83-93cd-ddb9b509acac/volumes"
Nov 30 08:10:31 crc kubenswrapper[4941]: I1130 08:10:31.996665 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hrqqq" podUID="77f49af3-564f-4cc7-9a85-9761952f2055" containerName="registry-server" containerID="cri-o://9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859" gracePeriod=2
Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.163226 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lzzkz"
Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.247658 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lzzkz"
Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.530507 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hrqqq"
Need to start a new one" pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.700659 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77f49af3-564f-4cc7-9a85-9761952f2055-utilities\") pod \"77f49af3-564f-4cc7-9a85-9761952f2055\" (UID: \"77f49af3-564f-4cc7-9a85-9761952f2055\") " Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.700999 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5rls\" (UniqueName: \"kubernetes.io/projected/77f49af3-564f-4cc7-9a85-9761952f2055-kube-api-access-r5rls\") pod \"77f49af3-564f-4cc7-9a85-9761952f2055\" (UID: \"77f49af3-564f-4cc7-9a85-9761952f2055\") " Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.701040 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77f49af3-564f-4cc7-9a85-9761952f2055-catalog-content\") pod \"77f49af3-564f-4cc7-9a85-9761952f2055\" (UID: \"77f49af3-564f-4cc7-9a85-9761952f2055\") " Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.704117 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77f49af3-564f-4cc7-9a85-9761952f2055-utilities" (OuterVolumeSpecName: "utilities") pod "77f49af3-564f-4cc7-9a85-9761952f2055" (UID: "77f49af3-564f-4cc7-9a85-9761952f2055"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.710409 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77f49af3-564f-4cc7-9a85-9761952f2055-kube-api-access-r5rls" (OuterVolumeSpecName: "kube-api-access-r5rls") pod "77f49af3-564f-4cc7-9a85-9761952f2055" (UID: "77f49af3-564f-4cc7-9a85-9761952f2055"). InnerVolumeSpecName "kube-api-access-r5rls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.779140 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77f49af3-564f-4cc7-9a85-9761952f2055-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "77f49af3-564f-4cc7-9a85-9761952f2055" (UID: "77f49af3-564f-4cc7-9a85-9761952f2055"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.803886 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5rls\" (UniqueName: \"kubernetes.io/projected/77f49af3-564f-4cc7-9a85-9761952f2055-kube-api-access-r5rls\") on node \"crc\" DevicePath \"\"" Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.803937 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77f49af3-564f-4cc7-9a85-9761952f2055-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.803951 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77f49af3-564f-4cc7-9a85-9761952f2055-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.979017 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:10:32 crc kubenswrapper[4941]: I1130 08:10:32.979092 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.011165 4941 generic.go:334] "Generic (PLEG): container finished" podID="77f49af3-564f-4cc7-9a85-9761952f2055" containerID="9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859" exitCode=0 Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.011228 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hrqqq" event={"ID":"77f49af3-564f-4cc7-9a85-9761952f2055","Type":"ContainerDied","Data":"9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859"} Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.011295 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hrqqq" event={"ID":"77f49af3-564f-4cc7-9a85-9761952f2055","Type":"ContainerDied","Data":"26e018d900a5ed8f95e1f87b3ebba9f425f36e821c40dcd6fd8c506eff2bb326"} Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.011294 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hrqqq" Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.011338 4941 scope.go:117] "RemoveContainer" containerID="9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859" Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.054270 4941 scope.go:117] "RemoveContainer" containerID="a2e515627a15e74626435c12b56dae5fda2a01e1a5f45ab0c5a40b913c8bac8a" Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.087876 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hrqqq"] Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.104542 4941 scope.go:117] "RemoveContainer" containerID="fb4a56d6a86fb96e9bbf5de3da6c8b681e4839de20f33ffa42620bbd9a449c33" Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.121010 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hrqqq"] Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.172576 4941 scope.go:117] "RemoveContainer" containerID="9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859" Nov 30 08:10:33 crc kubenswrapper[4941]: E1130 08:10:33.176458 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859\": container with ID starting with 9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859 not found: ID does not exist" containerID="9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859" Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.176515 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859"} err="failed to get container status \"9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859\": rpc error: code = NotFound desc = could not find container \"9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859\": container with ID starting with 9b8a1801f7fd39b8daeb29cb3db1075d7b11044f69603ea0a9df22e0c1219859 not found: ID does not exist" Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.176547 4941 scope.go:117] "RemoveContainer" containerID="a2e515627a15e74626435c12b56dae5fda2a01e1a5f45ab0c5a40b913c8bac8a" Nov 30 08:10:33 crc kubenswrapper[4941]: E1130 08:10:33.185529 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2e515627a15e74626435c12b56dae5fda2a01e1a5f45ab0c5a40b913c8bac8a\": container with ID starting with a2e515627a15e74626435c12b56dae5fda2a01e1a5f45ab0c5a40b913c8bac8a not found: ID does not exist" containerID="a2e515627a15e74626435c12b56dae5fda2a01e1a5f45ab0c5a40b913c8bac8a" Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.185588 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2e515627a15e74626435c12b56dae5fda2a01e1a5f45ab0c5a40b913c8bac8a"} err="failed to get container status \"a2e515627a15e74626435c12b56dae5fda2a01e1a5f45ab0c5a40b913c8bac8a\": rpc error: code = NotFound desc = could not find container \"a2e515627a15e74626435c12b56dae5fda2a01e1a5f45ab0c5a40b913c8bac8a\": container with ID starting with a2e515627a15e74626435c12b56dae5fda2a01e1a5f45ab0c5a40b913c8bac8a not found: ID does not exist" Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.185628 4941 scope.go:117] "RemoveContainer" 
containerID="fb4a56d6a86fb96e9bbf5de3da6c8b681e4839de20f33ffa42620bbd9a449c33" Nov 30 08:10:33 crc kubenswrapper[4941]: E1130 08:10:33.192590 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb4a56d6a86fb96e9bbf5de3da6c8b681e4839de20f33ffa42620bbd9a449c33\": container with ID starting with fb4a56d6a86fb96e9bbf5de3da6c8b681e4839de20f33ffa42620bbd9a449c33 not found: ID does not exist" containerID="fb4a56d6a86fb96e9bbf5de3da6c8b681e4839de20f33ffa42620bbd9a449c33" Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.192669 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb4a56d6a86fb96e9bbf5de3da6c8b681e4839de20f33ffa42620bbd9a449c33"} err="failed to get container status \"fb4a56d6a86fb96e9bbf5de3da6c8b681e4839de20f33ffa42620bbd9a449c33\": rpc error: code = NotFound desc = could not find container \"fb4a56d6a86fb96e9bbf5de3da6c8b681e4839de20f33ffa42620bbd9a449c33\": container with ID starting with fb4a56d6a86fb96e9bbf5de3da6c8b681e4839de20f33ffa42620bbd9a449c33 not found: ID does not exist" Nov 30 08:10:33 crc kubenswrapper[4941]: I1130 08:10:33.556719 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77f49af3-564f-4cc7-9a85-9761952f2055" path="/var/lib/kubelet/pods/77f49af3-564f-4cc7-9a85-9761952f2055/volumes" Nov 30 08:10:34 crc kubenswrapper[4941]: I1130 08:10:34.714694 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lzzkz"] Nov 30 08:10:34 crc kubenswrapper[4941]: I1130 08:10:34.715022 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lzzkz" podUID="dbee6666-0b15-4a02-a39c-482ca66c3341" containerName="registry-server" containerID="cri-o://2cfe5c325890f906a6ca6de3d6081958935e66096a21872a47ee62705bfc7a31" gracePeriod=2 Nov 30 08:10:35 crc kubenswrapper[4941]: I1130 08:10:35.036760 4941 generic.go:334] "Generic (PLEG): container finished" podID="dbee6666-0b15-4a02-a39c-482ca66c3341" containerID="2cfe5c325890f906a6ca6de3d6081958935e66096a21872a47ee62705bfc7a31" exitCode=0 Nov 30 08:10:35 crc kubenswrapper[4941]: I1130 08:10:35.036886 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzzkz" event={"ID":"dbee6666-0b15-4a02-a39c-482ca66c3341","Type":"ContainerDied","Data":"2cfe5c325890f906a6ca6de3d6081958935e66096a21872a47ee62705bfc7a31"} Nov 30 08:10:35 crc kubenswrapper[4941]: I1130 08:10:35.142041 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:35 crc kubenswrapper[4941]: I1130 08:10:35.186535 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcqf7\" (UniqueName: \"kubernetes.io/projected/dbee6666-0b15-4a02-a39c-482ca66c3341-kube-api-access-pcqf7\") pod \"dbee6666-0b15-4a02-a39c-482ca66c3341\" (UID: \"dbee6666-0b15-4a02-a39c-482ca66c3341\") " Nov 30 08:10:35 crc kubenswrapper[4941]: I1130 08:10:35.186699 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbee6666-0b15-4a02-a39c-482ca66c3341-utilities\") pod \"dbee6666-0b15-4a02-a39c-482ca66c3341\" (UID: \"dbee6666-0b15-4a02-a39c-482ca66c3341\") " Nov 30 08:10:35 crc kubenswrapper[4941]: I1130 08:10:35.187080 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbee6666-0b15-4a02-a39c-482ca66c3341-catalog-content\") pod \"dbee6666-0b15-4a02-a39c-482ca66c3341\" (UID: \"dbee6666-0b15-4a02-a39c-482ca66c3341\") " Nov 30 08:10:35 crc kubenswrapper[4941]: I1130 08:10:35.188854 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbee6666-0b15-4a02-a39c-482ca66c3341-utilities" (OuterVolumeSpecName: "utilities") pod "dbee6666-0b15-4a02-a39c-482ca66c3341" (UID: "dbee6666-0b15-4a02-a39c-482ca66c3341"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:10:35 crc kubenswrapper[4941]: I1130 08:10:35.198638 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbee6666-0b15-4a02-a39c-482ca66c3341-kube-api-access-pcqf7" (OuterVolumeSpecName: "kube-api-access-pcqf7") pod "dbee6666-0b15-4a02-a39c-482ca66c3341" (UID: "dbee6666-0b15-4a02-a39c-482ca66c3341"). InnerVolumeSpecName "kube-api-access-pcqf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:10:35 crc kubenswrapper[4941]: I1130 08:10:35.288756 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcqf7\" (UniqueName: \"kubernetes.io/projected/dbee6666-0b15-4a02-a39c-482ca66c3341-kube-api-access-pcqf7\") on node \"crc\" DevicePath \"\"" Nov 30 08:10:35 crc kubenswrapper[4941]: I1130 08:10:35.288792 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbee6666-0b15-4a02-a39c-482ca66c3341-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:10:35 crc kubenswrapper[4941]: I1130 08:10:35.306959 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbee6666-0b15-4a02-a39c-482ca66c3341-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dbee6666-0b15-4a02-a39c-482ca66c3341" (UID: "dbee6666-0b15-4a02-a39c-482ca66c3341"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:10:35 crc kubenswrapper[4941]: I1130 08:10:35.390442 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbee6666-0b15-4a02-a39c-482ca66c3341-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:10:36 crc kubenswrapper[4941]: I1130 08:10:36.055080 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzzkz" event={"ID":"dbee6666-0b15-4a02-a39c-482ca66c3341","Type":"ContainerDied","Data":"2acdc88428badb8812d3572f38ab464bc7ea11cd2c95151d4cf7571a633ddafd"} Nov 30 08:10:36 crc kubenswrapper[4941]: I1130 08:10:36.055153 4941 scope.go:117] "RemoveContainer" containerID="2cfe5c325890f906a6ca6de3d6081958935e66096a21872a47ee62705bfc7a31" Nov 30 08:10:36 crc kubenswrapper[4941]: I1130 08:10:36.055361 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lzzkz" Nov 30 08:10:36 crc kubenswrapper[4941]: I1130 08:10:36.092216 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lzzkz"] Nov 30 08:10:36 crc kubenswrapper[4941]: I1130 08:10:36.098643 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lzzkz"] Nov 30 08:10:36 crc kubenswrapper[4941]: I1130 08:10:36.104796 4941 scope.go:117] "RemoveContainer" containerID="f64432f3b6da93f2d6774d11cbacce7efb888e7d9cc8ebf5a35ff50811f3e249" Nov 30 08:10:36 crc kubenswrapper[4941]: I1130 08:10:36.132972 4941 scope.go:117] "RemoveContainer" containerID="0053fb3121ebd7a243c7e948f50ba0f529329dfaf88ee4610444330a9729fde2" Nov 30 08:10:37 crc kubenswrapper[4941]: I1130 08:10:37.543207 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbee6666-0b15-4a02-a39c-482ca66c3341" path="/var/lib/kubelet/pods/dbee6666-0b15-4a02-a39c-482ca66c3341/volumes" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.621433 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-smhpg"] Nov 30 08:10:56 crc kubenswrapper[4941]: E1130 08:10:56.622782 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbee6666-0b15-4a02-a39c-482ca66c3341" containerName="extract-utilities" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.622803 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbee6666-0b15-4a02-a39c-482ca66c3341" containerName="extract-utilities" Nov 30 08:10:56 crc kubenswrapper[4941]: E1130 08:10:56.622819 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51b404a0-43d1-4e83-93cd-ddb9b509acac" containerName="extract-utilities" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.622827 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="51b404a0-43d1-4e83-93cd-ddb9b509acac" containerName="extract-utilities" Nov 30 08:10:56 crc kubenswrapper[4941]: E1130 08:10:56.622848 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77f49af3-564f-4cc7-9a85-9761952f2055" containerName="extract-utilities" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.622857 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="77f49af3-564f-4cc7-9a85-9761952f2055" containerName="extract-utilities" Nov 30 08:10:56 crc kubenswrapper[4941]: E1130 08:10:56.622882 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbee6666-0b15-4a02-a39c-482ca66c3341" containerName="extract-content" Nov 30 08:10:56 crc 
kubenswrapper[4941]: I1130 08:10:56.622892 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbee6666-0b15-4a02-a39c-482ca66c3341" containerName="extract-content" Nov 30 08:10:56 crc kubenswrapper[4941]: E1130 08:10:56.622908 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77f49af3-564f-4cc7-9a85-9761952f2055" containerName="registry-server" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.622917 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="77f49af3-564f-4cc7-9a85-9761952f2055" containerName="registry-server" Nov 30 08:10:56 crc kubenswrapper[4941]: E1130 08:10:56.622935 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51b404a0-43d1-4e83-93cd-ddb9b509acac" containerName="extract-content" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.622944 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="51b404a0-43d1-4e83-93cd-ddb9b509acac" containerName="extract-content" Nov 30 08:10:56 crc kubenswrapper[4941]: E1130 08:10:56.622963 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77f49af3-564f-4cc7-9a85-9761952f2055" containerName="extract-content" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.622972 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="77f49af3-564f-4cc7-9a85-9761952f2055" containerName="extract-content" Nov 30 08:10:56 crc kubenswrapper[4941]: E1130 08:10:56.622985 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51b404a0-43d1-4e83-93cd-ddb9b509acac" containerName="registry-server" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.622993 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="51b404a0-43d1-4e83-93cd-ddb9b509acac" containerName="registry-server" Nov 30 08:10:56 crc kubenswrapper[4941]: E1130 08:10:56.623008 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbee6666-0b15-4a02-a39c-482ca66c3341" containerName="registry-server" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.623017 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbee6666-0b15-4a02-a39c-482ca66c3341" containerName="registry-server" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.623225 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="77f49af3-564f-4cc7-9a85-9761952f2055" containerName="registry-server" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.623241 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbee6666-0b15-4a02-a39c-482ca66c3341" containerName="registry-server" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.623266 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="51b404a0-43d1-4e83-93cd-ddb9b509acac" containerName="registry-server" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.624134 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-smhpg" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.639781 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-smhpg"] Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.707431 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-x5vrj"] Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.708662 4941 util.go:30] "No sandbox for pod can be found. 
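The cpu_manager and memory_manager "RemoveStaleState" burst above shows the kubelet scrubbing leftover per-container CPU and memory assignments for the three catalog pods deleted earlier. An illustrative Go sketch of that sweep pattern, with types and names invented for illustration (this is not kubelet source):

```go
package main

import "fmt"

// removeStaleState drops per-container resource assignments for pods the
// kubelet no longer knows about, mirroring the paired "RemoveStaleState:
// removing container" / "Deleted CPUSet assignment" entries above.
func removeStaleState(assignments map[string][]string, active map[string]bool) {
	for uid, containers := range assignments {
		if active[uid] {
			continue
		}
		for _, name := range containers {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", uid, name)
		}
		delete(assignments, uid)
	}
}

func main() {
	// The three catalog-pod UIDs from the log; none is active any more, so
	// every assignment is stale and gets swept.
	assignments := map[string][]string{
		"dbee6666-0b15-4a02-a39c-482ca66c3341": {"extract-utilities", "extract-content", "registry-server"},
		"51b404a0-43d1-4e83-93cd-ddb9b509acac": {"extract-utilities", "extract-content", "registry-server"},
		"77f49af3-564f-4cc7-9a85-9761952f2055": {"extract-utilities", "extract-content", "registry-server"},
	}
	removeStaleState(assignments, map[string]bool{})
}
```

The sweep runs lazily when the next pod is admitted, which is why it appears here interleaved with the "SyncLoop ADD" for the nova jobs rather than at deletion time.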
Need to start a new one" pod="openstack/nova-cell0-db-create-x5vrj" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.714566 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763a8718-0d67-4fda-afee-a01eebc05063-operator-scripts\") pod \"nova-api-db-create-smhpg\" (UID: \"763a8718-0d67-4fda-afee-a01eebc05063\") " pod="openstack/nova-api-db-create-smhpg" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.714655 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5ngt\" (UniqueName: \"kubernetes.io/projected/763a8718-0d67-4fda-afee-a01eebc05063-kube-api-access-m5ngt\") pod \"nova-api-db-create-smhpg\" (UID: \"763a8718-0d67-4fda-afee-a01eebc05063\") " pod="openstack/nova-api-db-create-smhpg" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.721571 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-x5vrj"] Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.809107 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-q7wck"] Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.810836 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-q7wck" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.816504 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvl5x\" (UniqueName: \"kubernetes.io/projected/fce27667-dc7c-41ec-837f-c924456e2e1e-kube-api-access-rvl5x\") pod \"nova-cell0-db-create-x5vrj\" (UID: \"fce27667-dc7c-41ec-837f-c924456e2e1e\") " pod="openstack/nova-cell0-db-create-x5vrj" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.816587 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce27667-dc7c-41ec-837f-c924456e2e1e-operator-scripts\") pod \"nova-cell0-db-create-x5vrj\" (UID: \"fce27667-dc7c-41ec-837f-c924456e2e1e\") " pod="openstack/nova-cell0-db-create-x5vrj" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.816635 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763a8718-0d67-4fda-afee-a01eebc05063-operator-scripts\") pod \"nova-api-db-create-smhpg\" (UID: \"763a8718-0d67-4fda-afee-a01eebc05063\") " pod="openstack/nova-api-db-create-smhpg" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.816713 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5ngt\" (UniqueName: \"kubernetes.io/projected/763a8718-0d67-4fda-afee-a01eebc05063-kube-api-access-m5ngt\") pod \"nova-api-db-create-smhpg\" (UID: \"763a8718-0d67-4fda-afee-a01eebc05063\") " pod="openstack/nova-api-db-create-smhpg" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.818145 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763a8718-0d67-4fda-afee-a01eebc05063-operator-scripts\") pod \"nova-api-db-create-smhpg\" (UID: \"763a8718-0d67-4fda-afee-a01eebc05063\") " pod="openstack/nova-api-db-create-smhpg" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.831394 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-1620-account-create-update-h4vrp"] Nov 30 
08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.833433 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1620-account-create-update-h4vrp" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.837703 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.843067 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5ngt\" (UniqueName: \"kubernetes.io/projected/763a8718-0d67-4fda-afee-a01eebc05063-kube-api-access-m5ngt\") pod \"nova-api-db-create-smhpg\" (UID: \"763a8718-0d67-4fda-afee-a01eebc05063\") " pod="openstack/nova-api-db-create-smhpg" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.843176 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-q7wck"] Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.867646 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1620-account-create-update-h4vrp"] Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.918609 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed29adfa-9314-48f2-a5ce-b0615faace71-operator-scripts\") pod \"nova-api-1620-account-create-update-h4vrp\" (UID: \"ed29adfa-9314-48f2-a5ce-b0615faace71\") " pod="openstack/nova-api-1620-account-create-update-h4vrp" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.918712 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjk2z\" (UniqueName: \"kubernetes.io/projected/913a4896-fef0-4a24-a143-d99183546680-kube-api-access-kjk2z\") pod \"nova-cell1-db-create-q7wck\" (UID: \"913a4896-fef0-4a24-a143-d99183546680\") " pod="openstack/nova-cell1-db-create-q7wck" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.918779 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvl5x\" (UniqueName: \"kubernetes.io/projected/fce27667-dc7c-41ec-837f-c924456e2e1e-kube-api-access-rvl5x\") pod \"nova-cell0-db-create-x5vrj\" (UID: \"fce27667-dc7c-41ec-837f-c924456e2e1e\") " pod="openstack/nova-cell0-db-create-x5vrj" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.919023 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce27667-dc7c-41ec-837f-c924456e2e1e-operator-scripts\") pod \"nova-cell0-db-create-x5vrj\" (UID: \"fce27667-dc7c-41ec-837f-c924456e2e1e\") " pod="openstack/nova-cell0-db-create-x5vrj" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.919164 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cdxq\" (UniqueName: \"kubernetes.io/projected/ed29adfa-9314-48f2-a5ce-b0615faace71-kube-api-access-6cdxq\") pod \"nova-api-1620-account-create-update-h4vrp\" (UID: \"ed29adfa-9314-48f2-a5ce-b0615faace71\") " pod="openstack/nova-api-1620-account-create-update-h4vrp" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.919276 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/913a4896-fef0-4a24-a143-d99183546680-operator-scripts\") pod \"nova-cell1-db-create-q7wck\" (UID: \"913a4896-fef0-4a24-a143-d99183546680\") " 
pod="openstack/nova-cell1-db-create-q7wck" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.919945 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce27667-dc7c-41ec-837f-c924456e2e1e-operator-scripts\") pod \"nova-cell0-db-create-x5vrj\" (UID: \"fce27667-dc7c-41ec-837f-c924456e2e1e\") " pod="openstack/nova-cell0-db-create-x5vrj" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.936760 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvl5x\" (UniqueName: \"kubernetes.io/projected/fce27667-dc7c-41ec-837f-c924456e2e1e-kube-api-access-rvl5x\") pod \"nova-cell0-db-create-x5vrj\" (UID: \"fce27667-dc7c-41ec-837f-c924456e2e1e\") " pod="openstack/nova-cell0-db-create-x5vrj" Nov 30 08:10:56 crc kubenswrapper[4941]: I1130 08:10:56.943828 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-smhpg" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.016606 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-47e9-account-create-update-bgbkb"] Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.018949 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-47e9-account-create-update-bgbkb" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.021160 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed29adfa-9314-48f2-a5ce-b0615faace71-operator-scripts\") pod \"nova-api-1620-account-create-update-h4vrp\" (UID: \"ed29adfa-9314-48f2-a5ce-b0615faace71\") " pod="openstack/nova-api-1620-account-create-update-h4vrp" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.021257 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjk2z\" (UniqueName: \"kubernetes.io/projected/913a4896-fef0-4a24-a143-d99183546680-kube-api-access-kjk2z\") pod \"nova-cell1-db-create-q7wck\" (UID: \"913a4896-fef0-4a24-a143-d99183546680\") " pod="openstack/nova-cell1-db-create-q7wck" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.021398 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cdxq\" (UniqueName: \"kubernetes.io/projected/ed29adfa-9314-48f2-a5ce-b0615faace71-kube-api-access-6cdxq\") pod \"nova-api-1620-account-create-update-h4vrp\" (UID: \"ed29adfa-9314-48f2-a5ce-b0615faace71\") " pod="openstack/nova-api-1620-account-create-update-h4vrp" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.021452 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/913a4896-fef0-4a24-a143-d99183546680-operator-scripts\") pod \"nova-cell1-db-create-q7wck\" (UID: \"913a4896-fef0-4a24-a143-d99183546680\") " pod="openstack/nova-cell1-db-create-q7wck" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.021807 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.022823 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/913a4896-fef0-4a24-a143-d99183546680-operator-scripts\") pod \"nova-cell1-db-create-q7wck\" (UID: \"913a4896-fef0-4a24-a143-d99183546680\") " pod="openstack/nova-cell1-db-create-q7wck" 
Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.023182 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed29adfa-9314-48f2-a5ce-b0615faace71-operator-scripts\") pod \"nova-api-1620-account-create-update-h4vrp\" (UID: \"ed29adfa-9314-48f2-a5ce-b0615faace71\") " pod="openstack/nova-api-1620-account-create-update-h4vrp" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.029167 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-47e9-account-create-update-bgbkb"] Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.055610 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cdxq\" (UniqueName: \"kubernetes.io/projected/ed29adfa-9314-48f2-a5ce-b0615faace71-kube-api-access-6cdxq\") pod \"nova-api-1620-account-create-update-h4vrp\" (UID: \"ed29adfa-9314-48f2-a5ce-b0615faace71\") " pod="openstack/nova-api-1620-account-create-update-h4vrp" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.055889 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjk2z\" (UniqueName: \"kubernetes.io/projected/913a4896-fef0-4a24-a143-d99183546680-kube-api-access-kjk2z\") pod \"nova-cell1-db-create-q7wck\" (UID: \"913a4896-fef0-4a24-a143-d99183546680\") " pod="openstack/nova-cell1-db-create-q7wck" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.072688 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-x5vrj" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.123192 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ghwz\" (UniqueName: \"kubernetes.io/projected/e64460e1-5b29-4c67-8b81-ca53d91dcfd2-kube-api-access-2ghwz\") pod \"nova-cell0-47e9-account-create-update-bgbkb\" (UID: \"e64460e1-5b29-4c67-8b81-ca53d91dcfd2\") " pod="openstack/nova-cell0-47e9-account-create-update-bgbkb" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.123638 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e64460e1-5b29-4c67-8b81-ca53d91dcfd2-operator-scripts\") pod \"nova-cell0-47e9-account-create-update-bgbkb\" (UID: \"e64460e1-5b29-4c67-8b81-ca53d91dcfd2\") " pod="openstack/nova-cell0-47e9-account-create-update-bgbkb" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.205363 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1620-account-create-update-h4vrp" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.205835 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-q7wck" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.214347 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-92b9-account-create-update-bvvmg"] Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.217604 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-92b9-account-create-update-bvvmg" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.225237 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.232487 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ghwz\" (UniqueName: \"kubernetes.io/projected/e64460e1-5b29-4c67-8b81-ca53d91dcfd2-kube-api-access-2ghwz\") pod \"nova-cell0-47e9-account-create-update-bgbkb\" (UID: \"e64460e1-5b29-4c67-8b81-ca53d91dcfd2\") " pod="openstack/nova-cell0-47e9-account-create-update-bgbkb" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.232632 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e64460e1-5b29-4c67-8b81-ca53d91dcfd2-operator-scripts\") pod \"nova-cell0-47e9-account-create-update-bgbkb\" (UID: \"e64460e1-5b29-4c67-8b81-ca53d91dcfd2\") " pod="openstack/nova-cell0-47e9-account-create-update-bgbkb" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.234436 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e64460e1-5b29-4c67-8b81-ca53d91dcfd2-operator-scripts\") pod \"nova-cell0-47e9-account-create-update-bgbkb\" (UID: \"e64460e1-5b29-4c67-8b81-ca53d91dcfd2\") " pod="openstack/nova-cell0-47e9-account-create-update-bgbkb" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.263388 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-92b9-account-create-update-bvvmg"] Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.291981 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ghwz\" (UniqueName: \"kubernetes.io/projected/e64460e1-5b29-4c67-8b81-ca53d91dcfd2-kube-api-access-2ghwz\") pod \"nova-cell0-47e9-account-create-update-bgbkb\" (UID: \"e64460e1-5b29-4c67-8b81-ca53d91dcfd2\") " pod="openstack/nova-cell0-47e9-account-create-update-bgbkb" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.335405 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgp87\" (UniqueName: \"kubernetes.io/projected/5ba195b9-1fb2-446d-9644-271c87d97b4f-kube-api-access-lgp87\") pod \"nova-cell1-92b9-account-create-update-bvvmg\" (UID: \"5ba195b9-1fb2-446d-9644-271c87d97b4f\") " pod="openstack/nova-cell1-92b9-account-create-update-bvvmg" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.335615 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ba195b9-1fb2-446d-9644-271c87d97b4f-operator-scripts\") pod \"nova-cell1-92b9-account-create-update-bvvmg\" (UID: \"5ba195b9-1fb2-446d-9644-271c87d97b4f\") " pod="openstack/nova-cell1-92b9-account-create-update-bvvmg" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.362518 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-smhpg"] Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.437039 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ba195b9-1fb2-446d-9644-271c87d97b4f-operator-scripts\") pod \"nova-cell1-92b9-account-create-update-bvvmg\" (UID: \"5ba195b9-1fb2-446d-9644-271c87d97b4f\") " 
pod="openstack/nova-cell1-92b9-account-create-update-bvvmg" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.437460 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgp87\" (UniqueName: \"kubernetes.io/projected/5ba195b9-1fb2-446d-9644-271c87d97b4f-kube-api-access-lgp87\") pod \"nova-cell1-92b9-account-create-update-bvvmg\" (UID: \"5ba195b9-1fb2-446d-9644-271c87d97b4f\") " pod="openstack/nova-cell1-92b9-account-create-update-bvvmg" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.438606 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ba195b9-1fb2-446d-9644-271c87d97b4f-operator-scripts\") pod \"nova-cell1-92b9-account-create-update-bvvmg\" (UID: \"5ba195b9-1fb2-446d-9644-271c87d97b4f\") " pod="openstack/nova-cell1-92b9-account-create-update-bvvmg" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.460321 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgp87\" (UniqueName: \"kubernetes.io/projected/5ba195b9-1fb2-446d-9644-271c87d97b4f-kube-api-access-lgp87\") pod \"nova-cell1-92b9-account-create-update-bvvmg\" (UID: \"5ba195b9-1fb2-446d-9644-271c87d97b4f\") " pod="openstack/nova-cell1-92b9-account-create-update-bvvmg" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.464051 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-47e9-account-create-update-bgbkb" Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.565446 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-92b9-account-create-update-bvvmg" Nov 30 08:10:57 crc kubenswrapper[4941]: W1130 08:10:57.658044 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfce27667_dc7c_41ec_837f_c924456e2e1e.slice/crio-d1857b36afd8a88321f734d701efef317d42781c9f270c2deb7ebc30c35b2e7c WatchSource:0}: Error finding container d1857b36afd8a88321f734d701efef317d42781c9f270c2deb7ebc30c35b2e7c: Status 404 returned error can't find the container with id d1857b36afd8a88321f734d701efef317d42781c9f270c2deb7ebc30c35b2e7c Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.658284 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-x5vrj"] Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.789394 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1620-account-create-update-h4vrp"] Nov 30 08:10:57 crc kubenswrapper[4941]: W1130 08:10:57.790493 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded29adfa_9314_48f2_a5ce_b0615faace71.slice/crio-8d0d12107e404c5f2865163c3ef343ce46c6955d36c22b1c52bee2c7b87ca0b9 WatchSource:0}: Error finding container 8d0d12107e404c5f2865163c3ef343ce46c6955d36c22b1c52bee2c7b87ca0b9: Status 404 returned error can't find the container with id 8d0d12107e404c5f2865163c3ef343ce46c6955d36c22b1c52bee2c7b87ca0b9 Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.877164 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-q7wck"] Nov 30 08:10:57 crc kubenswrapper[4941]: I1130 08:10:57.970201 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-47e9-account-create-update-bgbkb"] Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 
08:10:58.080385 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-92b9-account-create-update-bvvmg"] Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.346292 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-47e9-account-create-update-bgbkb" event={"ID":"e64460e1-5b29-4c67-8b81-ca53d91dcfd2","Type":"ContainerStarted","Data":"05e3e2f4ee88c02d5581fa82ae5436b62052b050e75e222e8552e8bf08bbd857"} Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.353004 4941 generic.go:334] "Generic (PLEG): container finished" podID="763a8718-0d67-4fda-afee-a01eebc05063" containerID="e21370cfaeb4aa8e987a00c22289353ec9a183bb03eafdf1e3f5e8b0e80975d3" exitCode=0 Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.353125 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-smhpg" event={"ID":"763a8718-0d67-4fda-afee-a01eebc05063","Type":"ContainerDied","Data":"e21370cfaeb4aa8e987a00c22289353ec9a183bb03eafdf1e3f5e8b0e80975d3"} Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.353162 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-smhpg" event={"ID":"763a8718-0d67-4fda-afee-a01eebc05063","Type":"ContainerStarted","Data":"799d66f8914c27c58e7ad234e39feefdf7db26552e87c354ea0b748e08d5194c"} Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.358406 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-92b9-account-create-update-bvvmg" event={"ID":"5ba195b9-1fb2-446d-9644-271c87d97b4f","Type":"ContainerStarted","Data":"d579438a2fb8d74a039965a6363444a154e2d0989cdf174a5e5b66039edfc62f"} Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.364689 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1620-account-create-update-h4vrp" event={"ID":"ed29adfa-9314-48f2-a5ce-b0615faace71","Type":"ContainerStarted","Data":"f9af4532560a62efe5cec47ef5f5e3ea4ff956766b2914285856a778fef81eca"} Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.364756 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1620-account-create-update-h4vrp" event={"ID":"ed29adfa-9314-48f2-a5ce-b0615faace71","Type":"ContainerStarted","Data":"8d0d12107e404c5f2865163c3ef343ce46c6955d36c22b1c52bee2c7b87ca0b9"} Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.371530 4941 generic.go:334] "Generic (PLEG): container finished" podID="fce27667-dc7c-41ec-837f-c924456e2e1e" containerID="8870350f29ad85874031abf61acb547306c9d0a2b3929e87239ef09ef6971a6e" exitCode=0 Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.371621 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-x5vrj" event={"ID":"fce27667-dc7c-41ec-837f-c924456e2e1e","Type":"ContainerDied","Data":"8870350f29ad85874031abf61acb547306c9d0a2b3929e87239ef09ef6971a6e"} Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.371692 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-x5vrj" event={"ID":"fce27667-dc7c-41ec-837f-c924456e2e1e","Type":"ContainerStarted","Data":"d1857b36afd8a88321f734d701efef317d42781c9f270c2deb7ebc30c35b2e7c"} Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.375008 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-q7wck" event={"ID":"913a4896-fef0-4a24-a143-d99183546680","Type":"ContainerStarted","Data":"061acc3b90e0795eb67275bd780349a6b5e956fc09032b63079c2c8bceecb3ca"} Nov 30 08:10:58 crc 
kubenswrapper[4941]: I1130 08:10:58.414724 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-1620-account-create-update-h4vrp" podStartSLOduration=2.414694188 podStartE2EDuration="2.414694188s" podCreationTimestamp="2025-11-30 08:10:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:10:58.388916471 +0000 UTC m=+5079.157088080" watchObservedRunningTime="2025-11-30 08:10:58.414694188 +0000 UTC m=+5079.182865797" Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.417704 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-92b9-account-create-update-bvvmg" podStartSLOduration=1.41769405 podStartE2EDuration="1.41769405s" podCreationTimestamp="2025-11-30 08:10:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:10:58.408679752 +0000 UTC m=+5079.176851361" watchObservedRunningTime="2025-11-30 08:10:58.41769405 +0000 UTC m=+5079.185865659" Nov 30 08:10:58 crc kubenswrapper[4941]: I1130 08:10:58.437593 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-q7wck" podStartSLOduration=2.437570965 podStartE2EDuration="2.437570965s" podCreationTimestamp="2025-11-30 08:10:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:10:58.422271571 +0000 UTC m=+5079.190443180" watchObservedRunningTime="2025-11-30 08:10:58.437570965 +0000 UTC m=+5079.205742564" Nov 30 08:10:59 crc kubenswrapper[4941]: I1130 08:10:59.396783 4941 generic.go:334] "Generic (PLEG): container finished" podID="913a4896-fef0-4a24-a143-d99183546680" containerID="ff4e9686f96b7d995147138eb7a0e7fb842515362838c07ad669c78fcfc00b57" exitCode=0 Nov 30 08:10:59 crc kubenswrapper[4941]: I1130 08:10:59.396971 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-q7wck" event={"ID":"913a4896-fef0-4a24-a143-d99183546680","Type":"ContainerDied","Data":"ff4e9686f96b7d995147138eb7a0e7fb842515362838c07ad669c78fcfc00b57"} Nov 30 08:10:59 crc kubenswrapper[4941]: I1130 08:10:59.402894 4941 generic.go:334] "Generic (PLEG): container finished" podID="e64460e1-5b29-4c67-8b81-ca53d91dcfd2" containerID="f06d52d575bcf7601e7923ad762d59a984f595ee567f7aad19fa4c94ca590620" exitCode=0 Nov 30 08:10:59 crc kubenswrapper[4941]: I1130 08:10:59.403024 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-47e9-account-create-update-bgbkb" event={"ID":"e64460e1-5b29-4c67-8b81-ca53d91dcfd2","Type":"ContainerDied","Data":"f06d52d575bcf7601e7923ad762d59a984f595ee567f7aad19fa4c94ca590620"} Nov 30 08:10:59 crc kubenswrapper[4941]: I1130 08:10:59.406174 4941 generic.go:334] "Generic (PLEG): container finished" podID="5ba195b9-1fb2-446d-9644-271c87d97b4f" containerID="b4032330490d57c515799bcb0b80c36b382ba44eae0d8b47a491cbc234f8f188" exitCode=0 Nov 30 08:10:59 crc kubenswrapper[4941]: I1130 08:10:59.406279 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-92b9-account-create-update-bvvmg" event={"ID":"5ba195b9-1fb2-446d-9644-271c87d97b4f","Type":"ContainerDied","Data":"b4032330490d57c515799bcb0b80c36b382ba44eae0d8b47a491cbc234f8f188"} Nov 30 08:10:59 crc kubenswrapper[4941]: I1130 08:10:59.409153 4941 generic.go:334] "Generic (PLEG): 
Nov 30 08:10:59 crc kubenswrapper[4941]: I1130 08:10:59.409237 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1620-account-create-update-h4vrp" event={"ID":"ed29adfa-9314-48f2-a5ce-b0615faace71","Type":"ContainerDied","Data":"f9af4532560a62efe5cec47ef5f5e3ea4ff956766b2914285856a778fef81eca"}
Nov 30 08:10:59 crc kubenswrapper[4941]: I1130 08:10:59.936369 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-x5vrj"
Nov 30 08:10:59 crc kubenswrapper[4941]: I1130 08:10:59.948306 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-smhpg"
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.103451 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763a8718-0d67-4fda-afee-a01eebc05063-operator-scripts\") pod \"763a8718-0d67-4fda-afee-a01eebc05063\" (UID: \"763a8718-0d67-4fda-afee-a01eebc05063\") "
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.103527 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce27667-dc7c-41ec-837f-c924456e2e1e-operator-scripts\") pod \"fce27667-dc7c-41ec-837f-c924456e2e1e\" (UID: \"fce27667-dc7c-41ec-837f-c924456e2e1e\") "
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.103653 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvl5x\" (UniqueName: \"kubernetes.io/projected/fce27667-dc7c-41ec-837f-c924456e2e1e-kube-api-access-rvl5x\") pod \"fce27667-dc7c-41ec-837f-c924456e2e1e\" (UID: \"fce27667-dc7c-41ec-837f-c924456e2e1e\") "
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.103811 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5ngt\" (UniqueName: \"kubernetes.io/projected/763a8718-0d67-4fda-afee-a01eebc05063-kube-api-access-m5ngt\") pod \"763a8718-0d67-4fda-afee-a01eebc05063\" (UID: \"763a8718-0d67-4fda-afee-a01eebc05063\") "
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.106527 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/763a8718-0d67-4fda-afee-a01eebc05063-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "763a8718-0d67-4fda-afee-a01eebc05063" (UID: "763a8718-0d67-4fda-afee-a01eebc05063"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.106679 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fce27667-dc7c-41ec-837f-c924456e2e1e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fce27667-dc7c-41ec-837f-c924456e2e1e" (UID: "fce27667-dc7c-41ec-837f-c924456e2e1e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.113656 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/763a8718-0d67-4fda-afee-a01eebc05063-kube-api-access-m5ngt" (OuterVolumeSpecName: "kube-api-access-m5ngt") pod "763a8718-0d67-4fda-afee-a01eebc05063" (UID: "763a8718-0d67-4fda-afee-a01eebc05063"). InnerVolumeSpecName "kube-api-access-m5ngt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.113860 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fce27667-dc7c-41ec-837f-c924456e2e1e-kube-api-access-rvl5x" (OuterVolumeSpecName: "kube-api-access-rvl5x") pod "fce27667-dc7c-41ec-837f-c924456e2e1e" (UID: "fce27667-dc7c-41ec-837f-c924456e2e1e"). InnerVolumeSpecName "kube-api-access-rvl5x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.207842 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763a8718-0d67-4fda-afee-a01eebc05063-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.207893 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce27667-dc7c-41ec-837f-c924456e2e1e-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.207914 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvl5x\" (UniqueName: \"kubernetes.io/projected/fce27667-dc7c-41ec-837f-c924456e2e1e-kube-api-access-rvl5x\") on node \"crc\" DevicePath \"\""
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.207935 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5ngt\" (UniqueName: \"kubernetes.io/projected/763a8718-0d67-4fda-afee-a01eebc05063-kube-api-access-m5ngt\") on node \"crc\" DevicePath \"\""
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.471568 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-x5vrj"
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.472967 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-x5vrj" event={"ID":"fce27667-dc7c-41ec-837f-c924456e2e1e","Type":"ContainerDied","Data":"d1857b36afd8a88321f734d701efef317d42781c9f270c2deb7ebc30c35b2e7c"}
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.473016 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1857b36afd8a88321f734d701efef317d42781c9f270c2deb7ebc30c35b2e7c"
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.476485 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-smhpg" event={"ID":"763a8718-0d67-4fda-afee-a01eebc05063","Type":"ContainerDied","Data":"799d66f8914c27c58e7ad234e39feefdf7db26552e87c354ea0b748e08d5194c"}
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.476521 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="799d66f8914c27c58e7ad234e39feefdf7db26552e87c354ea0b748e08d5194c"
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.476591 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-smhpg"
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.873139 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-q7wck"
Nov 30 08:11:00 crc kubenswrapper[4941]: I1130 08:11:00.994747 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1620-account-create-update-h4vrp"
Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.003202 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-92b9-account-create-update-bvvmg"
Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.013817 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-47e9-account-create-update-bgbkb"
Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.028168 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/913a4896-fef0-4a24-a143-d99183546680-operator-scripts\") pod \"913a4896-fef0-4a24-a143-d99183546680\" (UID: \"913a4896-fef0-4a24-a143-d99183546680\") "
Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.028515 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjk2z\" (UniqueName: \"kubernetes.io/projected/913a4896-fef0-4a24-a143-d99183546680-kube-api-access-kjk2z\") pod \"913a4896-fef0-4a24-a143-d99183546680\" (UID: \"913a4896-fef0-4a24-a143-d99183546680\") "
Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.032277 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/913a4896-fef0-4a24-a143-d99183546680-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "913a4896-fef0-4a24-a143-d99183546680" (UID: "913a4896-fef0-4a24-a143-d99183546680"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.050662 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/913a4896-fef0-4a24-a143-d99183546680-kube-api-access-kjk2z" (OuterVolumeSpecName: "kube-api-access-kjk2z") pod "913a4896-fef0-4a24-a143-d99183546680" (UID: "913a4896-fef0-4a24-a143-d99183546680"). InnerVolumeSpecName "kube-api-access-kjk2z". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.130318 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ba195b9-1fb2-446d-9644-271c87d97b4f-operator-scripts\") pod \"5ba195b9-1fb2-446d-9644-271c87d97b4f\" (UID: \"5ba195b9-1fb2-446d-9644-271c87d97b4f\") " Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.130520 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cdxq\" (UniqueName: \"kubernetes.io/projected/ed29adfa-9314-48f2-a5ce-b0615faace71-kube-api-access-6cdxq\") pod \"ed29adfa-9314-48f2-a5ce-b0615faace71\" (UID: \"ed29adfa-9314-48f2-a5ce-b0615faace71\") " Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.130616 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ghwz\" (UniqueName: \"kubernetes.io/projected/e64460e1-5b29-4c67-8b81-ca53d91dcfd2-kube-api-access-2ghwz\") pod \"e64460e1-5b29-4c67-8b81-ca53d91dcfd2\" (UID: \"e64460e1-5b29-4c67-8b81-ca53d91dcfd2\") " Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.130720 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed29adfa-9314-48f2-a5ce-b0615faace71-operator-scripts\") pod \"ed29adfa-9314-48f2-a5ce-b0615faace71\" (UID: \"ed29adfa-9314-48f2-a5ce-b0615faace71\") " Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.130779 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e64460e1-5b29-4c67-8b81-ca53d91dcfd2-operator-scripts\") pod \"e64460e1-5b29-4c67-8b81-ca53d91dcfd2\" (UID: \"e64460e1-5b29-4c67-8b81-ca53d91dcfd2\") " Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.130837 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgp87\" (UniqueName: \"kubernetes.io/projected/5ba195b9-1fb2-446d-9644-271c87d97b4f-kube-api-access-lgp87\") pod \"5ba195b9-1fb2-446d-9644-271c87d97b4f\" (UID: \"5ba195b9-1fb2-446d-9644-271c87d97b4f\") " Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.131000 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ba195b9-1fb2-446d-9644-271c87d97b4f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5ba195b9-1fb2-446d-9644-271c87d97b4f" (UID: "5ba195b9-1fb2-446d-9644-271c87d97b4f"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.131344 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjk2z\" (UniqueName: \"kubernetes.io/projected/913a4896-fef0-4a24-a143-d99183546680-kube-api-access-kjk2z\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.131370 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/913a4896-fef0-4a24-a143-d99183546680-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.131382 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5ba195b9-1fb2-446d-9644-271c87d97b4f-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.131443 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed29adfa-9314-48f2-a5ce-b0615faace71-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ed29adfa-9314-48f2-a5ce-b0615faace71" (UID: "ed29adfa-9314-48f2-a5ce-b0615faace71"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.131670 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e64460e1-5b29-4c67-8b81-ca53d91dcfd2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e64460e1-5b29-4c67-8b81-ca53d91dcfd2" (UID: "e64460e1-5b29-4c67-8b81-ca53d91dcfd2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.135930 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e64460e1-5b29-4c67-8b81-ca53d91dcfd2-kube-api-access-2ghwz" (OuterVolumeSpecName: "kube-api-access-2ghwz") pod "e64460e1-5b29-4c67-8b81-ca53d91dcfd2" (UID: "e64460e1-5b29-4c67-8b81-ca53d91dcfd2"). InnerVolumeSpecName "kube-api-access-2ghwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.137647 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed29adfa-9314-48f2-a5ce-b0615faace71-kube-api-access-6cdxq" (OuterVolumeSpecName: "kube-api-access-6cdxq") pod "ed29adfa-9314-48f2-a5ce-b0615faace71" (UID: "ed29adfa-9314-48f2-a5ce-b0615faace71"). InnerVolumeSpecName "kube-api-access-6cdxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.138182 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ba195b9-1fb2-446d-9644-271c87d97b4f-kube-api-access-lgp87" (OuterVolumeSpecName: "kube-api-access-lgp87") pod "5ba195b9-1fb2-446d-9644-271c87d97b4f" (UID: "5ba195b9-1fb2-446d-9644-271c87d97b4f"). InnerVolumeSpecName "kube-api-access-lgp87". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.233517 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ghwz\" (UniqueName: \"kubernetes.io/projected/e64460e1-5b29-4c67-8b81-ca53d91dcfd2-kube-api-access-2ghwz\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.233583 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed29adfa-9314-48f2-a5ce-b0615faace71-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.233594 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e64460e1-5b29-4c67-8b81-ca53d91dcfd2-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.233609 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgp87\" (UniqueName: \"kubernetes.io/projected/5ba195b9-1fb2-446d-9644-271c87d97b4f-kube-api-access-lgp87\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.233619 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cdxq\" (UniqueName: \"kubernetes.io/projected/ed29adfa-9314-48f2-a5ce-b0615faace71-kube-api-access-6cdxq\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.491439 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1620-account-create-update-h4vrp" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.491458 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1620-account-create-update-h4vrp" event={"ID":"ed29adfa-9314-48f2-a5ce-b0615faace71","Type":"ContainerDied","Data":"8d0d12107e404c5f2865163c3ef343ce46c6955d36c22b1c52bee2c7b87ca0b9"} Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.491513 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d0d12107e404c5f2865163c3ef343ce46c6955d36c22b1c52bee2c7b87ca0b9" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.494484 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-q7wck" event={"ID":"913a4896-fef0-4a24-a143-d99183546680","Type":"ContainerDied","Data":"061acc3b90e0795eb67275bd780349a6b5e956fc09032b63079c2c8bceecb3ca"} Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.494573 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="061acc3b90e0795eb67275bd780349a6b5e956fc09032b63079c2c8bceecb3ca" Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.494587 4941 util.go:48] "No ready sandbox for pod can be found. 
Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.497235 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-47e9-account-create-update-bgbkb" event={"ID":"e64460e1-5b29-4c67-8b81-ca53d91dcfd2","Type":"ContainerDied","Data":"05e3e2f4ee88c02d5581fa82ae5436b62052b050e75e222e8552e8bf08bbd857"}
Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.497290 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05e3e2f4ee88c02d5581fa82ae5436b62052b050e75e222e8552e8bf08bbd857"
Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.497453 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-47e9-account-create-update-bgbkb"
Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.505222 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-92b9-account-create-update-bvvmg" event={"ID":"5ba195b9-1fb2-446d-9644-271c87d97b4f","Type":"ContainerDied","Data":"d579438a2fb8d74a039965a6363444a154e2d0989cdf174a5e5b66039edfc62f"}
Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.505273 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d579438a2fb8d74a039965a6363444a154e2d0989cdf174a5e5b66039edfc62f"
Nov 30 08:11:01 crc kubenswrapper[4941]: I1130 08:11:01.505299 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-92b9-account-create-update-bvvmg"
Nov 30 08:11:02 crc kubenswrapper[4941]: I1130 08:11:02.978920 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:11:02 crc kubenswrapper[4941]: I1130 08:11:02.979435 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:11:02 crc kubenswrapper[4941]: I1130 08:11:02.979493 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 08:11:02 crc kubenswrapper[4941]: I1130 08:11:02.980561 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b4e31e26d1d1b739fd99a437fefc2ed911d6c5ebbad46f7debdd4b9fe9b10aa8"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 08:11:02 crc kubenswrapper[4941]: I1130 08:11:02.980628 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://b4e31e26d1d1b739fd99a437fefc2ed911d6c5ebbad46f7debdd4b9fe9b10aa8" gracePeriod=600
Nov 30 08:11:03 crc kubenswrapper[4941]: I1130 08:11:03.537258 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="b4e31e26d1d1b739fd99a437fefc2ed911d6c5ebbad46f7debdd4b9fe9b10aa8" exitCode=0
Nov 30 08:11:03 crc kubenswrapper[4941]: I1130 08:11:03.537319 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"b4e31e26d1d1b739fd99a437fefc2ed911d6c5ebbad46f7debdd4b9fe9b10aa8"}
Nov 30 08:11:03 crc kubenswrapper[4941]: I1130 08:11:03.537775 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f"}
Nov 30 08:11:03 crc kubenswrapper[4941]: I1130 08:11:03.537825 4941 scope.go:117] "RemoveContainer" containerID="0da7b0de16f533ed618a489e419338e5c3bdc5efe314ac2cf7e37dfc33b0eea7"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.279033 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c9r8h"]
Nov 30 08:11:07 crc kubenswrapper[4941]: E1130 08:11:07.281422 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="763a8718-0d67-4fda-afee-a01eebc05063" containerName="mariadb-database-create"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.281509 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="763a8718-0d67-4fda-afee-a01eebc05063" containerName="mariadb-database-create"
Nov 30 08:11:07 crc kubenswrapper[4941]: E1130 08:11:07.281588 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ba195b9-1fb2-446d-9644-271c87d97b4f" containerName="mariadb-account-create-update"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.281649 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ba195b9-1fb2-446d-9644-271c87d97b4f" containerName="mariadb-account-create-update"
Nov 30 08:11:07 crc kubenswrapper[4941]: E1130 08:11:07.281743 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e64460e1-5b29-4c67-8b81-ca53d91dcfd2" containerName="mariadb-account-create-update"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.281803 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e64460e1-5b29-4c67-8b81-ca53d91dcfd2" containerName="mariadb-account-create-update"
Nov 30 08:11:07 crc kubenswrapper[4941]: E1130 08:11:07.281875 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed29adfa-9314-48f2-a5ce-b0615faace71" containerName="mariadb-account-create-update"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.281937 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed29adfa-9314-48f2-a5ce-b0615faace71" containerName="mariadb-account-create-update"
Nov 30 08:11:07 crc kubenswrapper[4941]: E1130 08:11:07.282010 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="913a4896-fef0-4a24-a143-d99183546680" containerName="mariadb-database-create"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.282073 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="913a4896-fef0-4a24-a143-d99183546680" containerName="mariadb-database-create"
Nov 30 08:11:07 crc kubenswrapper[4941]: E1130 08:11:07.282136 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fce27667-dc7c-41ec-837f-c924456e2e1e" containerName="mariadb-database-create"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.282196 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fce27667-dc7c-41ec-837f-c924456e2e1e" containerName="mariadb-database-create"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.282456 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ba195b9-1fb2-446d-9644-271c87d97b4f" containerName="mariadb-account-create-update"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.282606 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="fce27667-dc7c-41ec-837f-c924456e2e1e" containerName="mariadb-database-create"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.282681 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed29adfa-9314-48f2-a5ce-b0615faace71" containerName="mariadb-account-create-update"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.282750 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e64460e1-5b29-4c67-8b81-ca53d91dcfd2" containerName="mariadb-account-create-update"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.282810 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="913a4896-fef0-4a24-a143-d99183546680" containerName="mariadb-database-create"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.282866 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="763a8718-0d67-4fda-afee-a01eebc05063" containerName="mariadb-database-create"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.283563 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.285821 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-5rw75"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.288700 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.289431 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.296007 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c9r8h"]
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.364949 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-c9r8h\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") " pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.365150 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-scripts\") pod \"nova-cell0-conductor-db-sync-c9r8h\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") " pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.365436 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjb7s\" (UniqueName: \"kubernetes.io/projected/3297a490-2e16-4d8f-b43b-49035dfd9d24-kube-api-access-fjb7s\") pod \"nova-cell0-conductor-db-sync-c9r8h\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") " pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.365558 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-config-data\") pod \"nova-cell0-conductor-db-sync-c9r8h\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") " pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.467175 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjb7s\" (UniqueName: \"kubernetes.io/projected/3297a490-2e16-4d8f-b43b-49035dfd9d24-kube-api-access-fjb7s\") pod \"nova-cell0-conductor-db-sync-c9r8h\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") " pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.467254 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-config-data\") pod \"nova-cell0-conductor-db-sync-c9r8h\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") " pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.467342 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-c9r8h\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") " pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.467393 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-scripts\") pod \"nova-cell0-conductor-db-sync-c9r8h\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") " pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.789681 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-scripts\") pod \"nova-cell0-conductor-db-sync-c9r8h\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") " pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.790156 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-config-data\") pod \"nova-cell0-conductor-db-sync-c9r8h\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") " pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.791273 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-c9r8h\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") " pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.792009 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjb7s\" (UniqueName: \"kubernetes.io/projected/3297a490-2e16-4d8f-b43b-49035dfd9d24-kube-api-access-fjb7s\") pod \"nova-cell0-conductor-db-sync-c9r8h\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") " pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:07 crc kubenswrapper[4941]: I1130 08:11:07.904854 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:08 crc kubenswrapper[4941]: I1130 08:11:08.403404 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c9r8h"]
Nov 30 08:11:08 crc kubenswrapper[4941]: I1130 08:11:08.621805 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-c9r8h" event={"ID":"3297a490-2e16-4d8f-b43b-49035dfd9d24","Type":"ContainerStarted","Data":"4babec0d846448318bd69b8a169898f28035bc13309722a44b531afff0e793d9"}
Nov 30 08:11:18 crc kubenswrapper[4941]: I1130 08:11:18.774152 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-c9r8h" event={"ID":"3297a490-2e16-4d8f-b43b-49035dfd9d24","Type":"ContainerStarted","Data":"c1770aeab67d07993bbada27c7cd95226f99ec9d3c91227e856e86512fc145c0"}
Nov 30 08:11:18 crc kubenswrapper[4941]: I1130 08:11:18.800740 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-c9r8h" podStartSLOduration=2.363179599 podStartE2EDuration="11.800723238s" podCreationTimestamp="2025-11-30 08:11:07 +0000 UTC" firstStartedPulling="2025-11-30 08:11:08.428270368 +0000 UTC m=+5089.196441977" lastFinishedPulling="2025-11-30 08:11:17.865814007 +0000 UTC m=+5098.633985616" observedRunningTime="2025-11-30 08:11:18.79951183 +0000 UTC m=+5099.567683439" watchObservedRunningTime="2025-11-30 08:11:18.800723238 +0000 UTC m=+5099.568894847"
Nov 30 08:11:23 crc kubenswrapper[4941]: I1130 08:11:23.834728 4941 generic.go:334] "Generic (PLEG): container finished" podID="3297a490-2e16-4d8f-b43b-49035dfd9d24" containerID="c1770aeab67d07993bbada27c7cd95226f99ec9d3c91227e856e86512fc145c0" exitCode=0
Nov 30 08:11:23 crc kubenswrapper[4941]: I1130 08:11:23.834820 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-c9r8h" event={"ID":"3297a490-2e16-4d8f-b43b-49035dfd9d24","Type":"ContainerDied","Data":"c1770aeab67d07993bbada27c7cd95226f99ec9d3c91227e856e86512fc145c0"}
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.264473 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.301856 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-combined-ca-bundle\") pod \"3297a490-2e16-4d8f-b43b-49035dfd9d24\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") "
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.302031 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-config-data\") pod \"3297a490-2e16-4d8f-b43b-49035dfd9d24\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") "
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.302238 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-scripts\") pod \"3297a490-2e16-4d8f-b43b-49035dfd9d24\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") "
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.302348 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjb7s\" (UniqueName: \"kubernetes.io/projected/3297a490-2e16-4d8f-b43b-49035dfd9d24-kube-api-access-fjb7s\") pod \"3297a490-2e16-4d8f-b43b-49035dfd9d24\" (UID: \"3297a490-2e16-4d8f-b43b-49035dfd9d24\") "
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.325487 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3297a490-2e16-4d8f-b43b-49035dfd9d24-kube-api-access-fjb7s" (OuterVolumeSpecName: "kube-api-access-fjb7s") pod "3297a490-2e16-4d8f-b43b-49035dfd9d24" (UID: "3297a490-2e16-4d8f-b43b-49035dfd9d24"). InnerVolumeSpecName "kube-api-access-fjb7s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.327538 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-scripts" (OuterVolumeSpecName: "scripts") pod "3297a490-2e16-4d8f-b43b-49035dfd9d24" (UID: "3297a490-2e16-4d8f-b43b-49035dfd9d24"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.344380 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3297a490-2e16-4d8f-b43b-49035dfd9d24" (UID: "3297a490-2e16-4d8f-b43b-49035dfd9d24"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.344984 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-config-data" (OuterVolumeSpecName: "config-data") pod "3297a490-2e16-4d8f-b43b-49035dfd9d24" (UID: "3297a490-2e16-4d8f-b43b-49035dfd9d24"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.404859 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.404911 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-config-data\") on node \"crc\" DevicePath \"\""
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.404930 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3297a490-2e16-4d8f-b43b-49035dfd9d24-scripts\") on node \"crc\" DevicePath \"\""
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.404949 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjb7s\" (UniqueName: \"kubernetes.io/projected/3297a490-2e16-4d8f-b43b-49035dfd9d24-kube-api-access-fjb7s\") on node \"crc\" DevicePath \"\""
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.862125 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-c9r8h" event={"ID":"3297a490-2e16-4d8f-b43b-49035dfd9d24","Type":"ContainerDied","Data":"4babec0d846448318bd69b8a169898f28035bc13309722a44b531afff0e793d9"}
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.862771 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4babec0d846448318bd69b8a169898f28035bc13309722a44b531afff0e793d9"
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.862256 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-c9r8h"
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.967504 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 30 08:11:25 crc kubenswrapper[4941]: E1130 08:11:25.968233 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3297a490-2e16-4d8f-b43b-49035dfd9d24" containerName="nova-cell0-conductor-db-sync"
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.968267 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3297a490-2e16-4d8f-b43b-49035dfd9d24" containerName="nova-cell0-conductor-db-sync"
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.968558 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3297a490-2e16-4d8f-b43b-49035dfd9d24" containerName="nova-cell0-conductor-db-sync"
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.969518 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.972567 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-5rw75"
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.972771 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Nov 30 08:11:25 crc kubenswrapper[4941]: I1130 08:11:25.981135 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 30 08:11:26 crc kubenswrapper[4941]: I1130 08:11:26.025178 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b22ca75a-372d-42f8-89ad-9a8c88546f58-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b22ca75a-372d-42f8-89ad-9a8c88546f58\") " pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:26 crc kubenswrapper[4941]: I1130 08:11:26.025241 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckvh9\" (UniqueName: \"kubernetes.io/projected/b22ca75a-372d-42f8-89ad-9a8c88546f58-kube-api-access-ckvh9\") pod \"nova-cell0-conductor-0\" (UID: \"b22ca75a-372d-42f8-89ad-9a8c88546f58\") " pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:26 crc kubenswrapper[4941]: I1130 08:11:26.025385 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b22ca75a-372d-42f8-89ad-9a8c88546f58-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b22ca75a-372d-42f8-89ad-9a8c88546f58\") " pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:26 crc kubenswrapper[4941]: I1130 08:11:26.127226 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b22ca75a-372d-42f8-89ad-9a8c88546f58-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b22ca75a-372d-42f8-89ad-9a8c88546f58\") " pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:26 crc kubenswrapper[4941]: I1130 08:11:26.127461 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b22ca75a-372d-42f8-89ad-9a8c88546f58-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b22ca75a-372d-42f8-89ad-9a8c88546f58\") " pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:26 crc kubenswrapper[4941]: I1130 08:11:26.127496 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckvh9\" (UniqueName: \"kubernetes.io/projected/b22ca75a-372d-42f8-89ad-9a8c88546f58-kube-api-access-ckvh9\") pod \"nova-cell0-conductor-0\" (UID: \"b22ca75a-372d-42f8-89ad-9a8c88546f58\") " pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:26 crc kubenswrapper[4941]: I1130 08:11:26.134408 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b22ca75a-372d-42f8-89ad-9a8c88546f58-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b22ca75a-372d-42f8-89ad-9a8c88546f58\") " pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:26 crc kubenswrapper[4941]: I1130 08:11:26.134805 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b22ca75a-372d-42f8-89ad-9a8c88546f58-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b22ca75a-372d-42f8-89ad-9a8c88546f58\") " pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:26 crc kubenswrapper[4941]: I1130 08:11:26.149943 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckvh9\" (UniqueName: \"kubernetes.io/projected/b22ca75a-372d-42f8-89ad-9a8c88546f58-kube-api-access-ckvh9\") pod \"nova-cell0-conductor-0\" (UID: \"b22ca75a-372d-42f8-89ad-9a8c88546f58\") " pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:26 crc kubenswrapper[4941]: I1130 08:11:26.340240 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:26 crc kubenswrapper[4941]: I1130 08:11:26.834111 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 30 08:11:26 crc kubenswrapper[4941]: I1130 08:11:26.872211 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b22ca75a-372d-42f8-89ad-9a8c88546f58","Type":"ContainerStarted","Data":"81fdac5de95498187ff4135e183a7f8585c4a23f78efc518ab0ba5d6c7debcf4"}
Nov 30 08:11:27 crc kubenswrapper[4941]: I1130 08:11:27.886539 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b22ca75a-372d-42f8-89ad-9a8c88546f58","Type":"ContainerStarted","Data":"be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667"}
Nov 30 08:11:27 crc kubenswrapper[4941]: I1130 08:11:27.887433 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:27 crc kubenswrapper[4941]: I1130 08:11:27.922101 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.9220448660000002 podStartE2EDuration="2.922044866s" podCreationTimestamp="2025-11-30 08:11:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:11:27.912085858 +0000 UTC m=+5108.680257467" watchObservedRunningTime="2025-11-30 08:11:27.922044866 +0000 UTC m=+5108.690216475"
Nov 30 08:11:36 crc kubenswrapper[4941]: I1130 08:11:36.377277 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Nov 30 08:11:36 crc kubenswrapper[4941]: I1130 08:11:36.882029 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-zxbm8"]
Nov 30 08:11:36 crc kubenswrapper[4941]: I1130 08:11:36.884372 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zxbm8"
Nov 30 08:11:36 crc kubenswrapper[4941]: I1130 08:11:36.886205 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Nov 30 08:11:36 crc kubenswrapper[4941]: I1130 08:11:36.887470 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Nov 30 08:11:36 crc kubenswrapper[4941]: I1130 08:11:36.894198 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-zxbm8"]
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.019158 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.033100 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.040226 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.070463 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwk8z\" (UniqueName: \"kubernetes.io/projected/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-kube-api-access-xwk8z\") pod \"nova-cell0-cell-mapping-zxbm8\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " pod="openstack/nova-cell0-cell-mapping-zxbm8"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.070578 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-config-data\") pod \"nova-cell0-cell-mapping-zxbm8\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " pod="openstack/nova-cell0-cell-mapping-zxbm8"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.070610 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-zxbm8\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " pod="openstack/nova-cell0-cell-mapping-zxbm8"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.071592 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-scripts\") pod \"nova-cell0-cell-mapping-zxbm8\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " pod="openstack/nova-cell0-cell-mapping-zxbm8"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.124612 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.173707 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-scripts\") pod \"nova-cell0-cell-mapping-zxbm8\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " pod="openstack/nova-cell0-cell-mapping-zxbm8"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.173793 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-logs\") pod \"nova-api-0\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " pod="openstack/nova-api-0"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.173843 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " pod="openstack/nova-api-0"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.173869 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-config-data\") pod \"nova-api-0\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " pod="openstack/nova-api-0"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.173920 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwk8z\" (UniqueName: \"kubernetes.io/projected/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-kube-api-access-xwk8z\") pod \"nova-cell0-cell-mapping-zxbm8\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " pod="openstack/nova-cell0-cell-mapping-zxbm8"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.173943 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mtc8\" (UniqueName: \"kubernetes.io/projected/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-kube-api-access-5mtc8\") pod \"nova-api-0\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " pod="openstack/nova-api-0"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.173978 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-config-data\") pod \"nova-cell0-cell-mapping-zxbm8\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " pod="openstack/nova-cell0-cell-mapping-zxbm8"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.174004 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-zxbm8\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " pod="openstack/nova-cell0-cell-mapping-zxbm8"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.178686 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.180607 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.184385 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.190206 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-scripts\") pod \"nova-cell0-cell-mapping-zxbm8\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " pod="openstack/nova-cell0-cell-mapping-zxbm8"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.192867 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-config-data\") pod \"nova-cell0-cell-mapping-zxbm8\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " pod="openstack/nova-cell0-cell-mapping-zxbm8"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.201409 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.213128 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-zxbm8\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " pod="openstack/nova-cell0-cell-mapping-zxbm8"
Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.223007 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwk8z\" (UniqueName: \"kubernetes.io/projected/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-kube-api-access-xwk8z\") pod
\"nova-cell0-cell-mapping-zxbm8\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " pod="openstack/nova-cell0-cell-mapping-zxbm8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.236401 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.237843 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.241921 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.253293 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.277452 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glfkj\" (UniqueName: \"kubernetes.io/projected/247bd724-38a2-4bce-8fe7-f62b799e4f8e-kube-api-access-glfkj\") pod \"nova-metadata-0\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.277520 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-config-data\") pod \"nova-api-0\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " pod="openstack/nova-api-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.277582 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mtc8\" (UniqueName: \"kubernetes.io/projected/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-kube-api-access-5mtc8\") pod \"nova-api-0\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " pod="openstack/nova-api-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.277641 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/247bd724-38a2-4bce-8fe7-f62b799e4f8e-logs\") pod \"nova-metadata-0\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.277682 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247bd724-38a2-4bce-8fe7-f62b799e4f8e-config-data\") pod \"nova-metadata-0\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.277704 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-logs\") pod \"nova-api-0\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " pod="openstack/nova-api-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.277743 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247bd724-38a2-4bce-8fe7-f62b799e4f8e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.277762 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " pod="openstack/nova-api-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.280738 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-logs\") pod \"nova-api-0\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " pod="openstack/nova-api-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.288196 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " pod="openstack/nova-api-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.311728 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8c7b8fc9-zkzn8"] Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.312139 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-config-data\") pod \"nova-api-0\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " pod="openstack/nova-api-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.312266 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mtc8\" (UniqueName: \"kubernetes.io/projected/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-kube-api-access-5mtc8\") pod \"nova-api-0\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " pod="openstack/nova-api-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.313992 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.330711 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8c7b8fc9-zkzn8"] Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.357733 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.360034 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.363601 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.379756 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.381248 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247bd724-38a2-4bce-8fe7-f62b799e4f8e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.381350 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glfkj\" (UniqueName: \"kubernetes.io/projected/247bd724-38a2-4bce-8fe7-f62b799e4f8e-kube-api-access-glfkj\") pod \"nova-metadata-0\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.381416 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/431c61f7-17a1-4838-8d39-140f6bdc6d48-config-data\") pod \"nova-scheduler-0\" (UID: \"431c61f7-17a1-4838-8d39-140f6bdc6d48\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.381479 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdf6n\" (UniqueName: \"kubernetes.io/projected/431c61f7-17a1-4838-8d39-140f6bdc6d48-kube-api-access-kdf6n\") pod \"nova-scheduler-0\" (UID: \"431c61f7-17a1-4838-8d39-140f6bdc6d48\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.381797 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/431c61f7-17a1-4838-8d39-140f6bdc6d48-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"431c61f7-17a1-4838-8d39-140f6bdc6d48\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.381892 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/247bd724-38a2-4bce-8fe7-f62b799e4f8e-logs\") pod \"nova-metadata-0\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.381997 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247bd724-38a2-4bce-8fe7-f62b799e4f8e-config-data\") pod \"nova-metadata-0\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.382668 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/247bd724-38a2-4bce-8fe7-f62b799e4f8e-logs\") pod \"nova-metadata-0\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.386316 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247bd724-38a2-4bce-8fe7-f62b799e4f8e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.391907 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 08:11:37 crc kubenswrapper[4941]: 
I1130 08:11:37.418287 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247bd724-38a2-4bce-8fe7-f62b799e4f8e-config-data\") pod \"nova-metadata-0\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.418742 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glfkj\" (UniqueName: \"kubernetes.io/projected/247bd724-38a2-4bce-8fe7-f62b799e4f8e-kube-api-access-glfkj\") pod \"nova-metadata-0\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.443528 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.486214 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/431c61f7-17a1-4838-8d39-140f6bdc6d48-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"431c61f7-17a1-4838-8d39-140f6bdc6d48\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.486276 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-ovsdbserver-sb\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.486317 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.486360 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-ovsdbserver-nb\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.486398 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-config\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.486427 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qcv6\" (UniqueName: \"kubernetes.io/projected/700d548b-4429-4ba2-b203-ee546c2eb43f-kube-api-access-4qcv6\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.486467 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.486493 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/431c61f7-17a1-4838-8d39-140f6bdc6d48-config-data\") pod \"nova-scheduler-0\" (UID: \"431c61f7-17a1-4838-8d39-140f6bdc6d48\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.486512 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l65lq\" (UniqueName: \"kubernetes.io/projected/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-kube-api-access-l65lq\") pod \"nova-cell1-novncproxy-0\" (UID: \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.486536 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdf6n\" (UniqueName: \"kubernetes.io/projected/431c61f7-17a1-4838-8d39-140f6bdc6d48-kube-api-access-kdf6n\") pod \"nova-scheduler-0\" (UID: \"431c61f7-17a1-4838-8d39-140f6bdc6d48\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.486560 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-dns-svc\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.492764 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/431c61f7-17a1-4838-8d39-140f6bdc6d48-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"431c61f7-17a1-4838-8d39-140f6bdc6d48\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.493051 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/431c61f7-17a1-4838-8d39-140f6bdc6d48-config-data\") pod \"nova-scheduler-0\" (UID: \"431c61f7-17a1-4838-8d39-140f6bdc6d48\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.505931 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdf6n\" (UniqueName: \"kubernetes.io/projected/431c61f7-17a1-4838-8d39-140f6bdc6d48-kube-api-access-kdf6n\") pod \"nova-scheduler-0\" (UID: \"431c61f7-17a1-4838-8d39-140f6bdc6d48\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.513712 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zxbm8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.593811 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.594316 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-ovsdbserver-nb\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.594405 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-config\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.594434 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qcv6\" (UniqueName: \"kubernetes.io/projected/700d548b-4429-4ba2-b203-ee546c2eb43f-kube-api-access-4qcv6\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.594490 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.594538 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l65lq\" (UniqueName: \"kubernetes.io/projected/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-kube-api-access-l65lq\") pod \"nova-cell1-novncproxy-0\" (UID: \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.594567 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-dns-svc\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.594637 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-ovsdbserver-sb\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.595766 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-ovsdbserver-sb\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc 
kubenswrapper[4941]: I1130 08:11:37.598470 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-ovsdbserver-nb\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.599056 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-config\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.599405 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-dns-svc\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.610083 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.619001 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.631893 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l65lq\" (UniqueName: \"kubernetes.io/projected/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-kube-api-access-l65lq\") pod \"nova-cell1-novncproxy-0\" (UID: \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.637730 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qcv6\" (UniqueName: \"kubernetes.io/projected/700d548b-4429-4ba2-b203-ee546c2eb43f-kube-api-access-4qcv6\") pod \"dnsmasq-dns-b8c7b8fc9-zkzn8\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.763016 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.774117 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:37 crc kubenswrapper[4941]: I1130 08:11:37.790790 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.011454 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dmdkw"] Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.013009 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.016898 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.017033 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.056104 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dmdkw"] Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.072249 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.090297 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.108503 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-dmdkw\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.108571 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-scripts\") pod \"nova-cell1-conductor-db-sync-dmdkw\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.108755 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-config-data\") pod \"nova-cell1-conductor-db-sync-dmdkw\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.109112 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnr4t\" (UniqueName: \"kubernetes.io/projected/f9f9867d-77d1-42f5-aca3-2b84112deb56-kube-api-access-qnr4t\") pod \"nova-cell1-conductor-db-sync-dmdkw\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.161585 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-zxbm8"] Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.212687 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-dmdkw\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.212767 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-scripts\") pod \"nova-cell1-conductor-db-sync-dmdkw\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc 
kubenswrapper[4941]: I1130 08:11:38.212809 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-config-data\") pod \"nova-cell1-conductor-db-sync-dmdkw\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.212882 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnr4t\" (UniqueName: \"kubernetes.io/projected/f9f9867d-77d1-42f5-aca3-2b84112deb56-kube-api-access-qnr4t\") pod \"nova-cell1-conductor-db-sync-dmdkw\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.217857 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-dmdkw\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.219128 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-config-data\") pod \"nova-cell1-conductor-db-sync-dmdkw\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.239760 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-scripts\") pod \"nova-cell1-conductor-db-sync-dmdkw\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.249205 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnr4t\" (UniqueName: \"kubernetes.io/projected/f9f9867d-77d1-42f5-aca3-2b84112deb56-kube-api-access-qnr4t\") pod \"nova-cell1-conductor-db-sync-dmdkw\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.358223 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.362038 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:11:38 crc kubenswrapper[4941]: W1130 08:11:38.396966 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod431c61f7_17a1_4838_8d39_140f6bdc6d48.slice/crio-0659d06d088eadad3ce607d17bc3a65b49a73e30dada3d291e787c130edb6c5d WatchSource:0}: Error finding container 0659d06d088eadad3ce607d17bc3a65b49a73e30dada3d291e787c130edb6c5d: Status 404 returned error can't find the container with id 0659d06d088eadad3ce607d17bc3a65b49a73e30dada3d291e787c130edb6c5d Nov 30 08:11:38 crc kubenswrapper[4941]: I1130 08:11:38.553649 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8c7b8fc9-zkzn8"] Nov 30 08:11:39 crc kubenswrapper[4941]: I1130 08:11:39.032880 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zxbm8" event={"ID":"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2","Type":"ContainerStarted","Data":"1b37ff5150eb44ccd342e129be05564d33d508de3f8e20421fb1815bf920ae51"} Nov 30 08:11:39 crc kubenswrapper[4941]: I1130 08:11:39.033379 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zxbm8" event={"ID":"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2","Type":"ContainerStarted","Data":"385b391be1ea403e35d01097f60a067b07e6d919db7bed10cb30b71b507b022d"} Nov 30 08:11:39 crc kubenswrapper[4941]: I1130 08:11:39.035977 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ecee0c79-274e-4f9c-b9d4-467bc3e370a6","Type":"ContainerStarted","Data":"edec7823604001ea98f64913d59772c170ade8d9876ce373c327e3c66cbf02d0"} Nov 30 08:11:39 crc kubenswrapper[4941]: I1130 08:11:39.038049 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"431c61f7-17a1-4838-8d39-140f6bdc6d48","Type":"ContainerStarted","Data":"0659d06d088eadad3ce607d17bc3a65b49a73e30dada3d291e787c130edb6c5d"} Nov 30 08:11:39 crc kubenswrapper[4941]: I1130 08:11:39.046049 4941 generic.go:334] "Generic (PLEG): container finished" podID="700d548b-4429-4ba2-b203-ee546c2eb43f" containerID="daee6fd6c5ea69aeb4b1459565c0d92b78312a133fb8923ecb5a4dec7724ed61" exitCode=0 Nov 30 08:11:39 crc kubenswrapper[4941]: I1130 08:11:39.046119 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" event={"ID":"700d548b-4429-4ba2-b203-ee546c2eb43f","Type":"ContainerDied","Data":"daee6fd6c5ea69aeb4b1459565c0d92b78312a133fb8923ecb5a4dec7724ed61"} Nov 30 08:11:39 crc kubenswrapper[4941]: I1130 08:11:39.046140 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" event={"ID":"700d548b-4429-4ba2-b203-ee546c2eb43f","Type":"ContainerStarted","Data":"d18b9c2dd5550e0b283e041d321e955b57e5b0fb582156c33f934e5f659eb679"} Nov 30 08:11:39 crc kubenswrapper[4941]: I1130 08:11:39.056812 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-zxbm8" podStartSLOduration=3.056795133 podStartE2EDuration="3.056795133s" podCreationTimestamp="2025-11-30 08:11:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:11:39.047227937 +0000 UTC m=+5119.815399556" 
watchObservedRunningTime="2025-11-30 08:11:39.056795133 +0000 UTC m=+5119.824966742" Nov 30 08:11:39 crc kubenswrapper[4941]: I1130 08:11:39.059448 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"247bd724-38a2-4bce-8fe7-f62b799e4f8e","Type":"ContainerStarted","Data":"c87160a7a93a448dcf5cd01576659ee3b71c144a2e1c78813b68d003d2ffeb12"} Nov 30 08:11:39 crc kubenswrapper[4941]: I1130 08:11:39.257208 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 08:11:39 crc kubenswrapper[4941]: I1130 08:11:39.372951 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dmdkw"] Nov 30 08:11:40 crc kubenswrapper[4941]: I1130 08:11:40.071352 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" event={"ID":"700d548b-4429-4ba2-b203-ee546c2eb43f","Type":"ContainerStarted","Data":"191a1d3ddff67eec4b1d0ceb61434ccc38a8668846429b18dc0112fdd677c49f"} Nov 30 08:11:40 crc kubenswrapper[4941]: I1130 08:11:40.071880 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:40 crc kubenswrapper[4941]: I1130 08:11:40.090696 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" podStartSLOduration=3.090670142 podStartE2EDuration="3.090670142s" podCreationTimestamp="2025-11-30 08:11:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:11:40.089504156 +0000 UTC m=+5120.857675765" watchObservedRunningTime="2025-11-30 08:11:40.090670142 +0000 UTC m=+5120.858841751" Nov 30 08:11:41 crc kubenswrapper[4941]: I1130 08:11:41.089188 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dmdkw" event={"ID":"f9f9867d-77d1-42f5-aca3-2b84112deb56","Type":"ContainerStarted","Data":"47d011dbf4d777aa0bfa98d5622833447717ff36be2aff7dd9e80fff51203eb1"} Nov 30 08:11:42 crc kubenswrapper[4941]: I1130 08:11:42.117800 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a9a5e3c2-ae0c-4884-a48b-705bfda74cda","Type":"ContainerStarted","Data":"af0ea63ed474bf377a39115bb8e95a0b292e3ea5bc7a3e6bdb4cb02c25be1f49"} Nov 30 08:11:43 crc kubenswrapper[4941]: I1130 08:11:43.134724 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ecee0c79-274e-4f9c-b9d4-467bc3e370a6","Type":"ContainerStarted","Data":"53d311d948f473d507fa1005f78d9b07e86af904cbd06cd7510418e732b8da6c"} Nov 30 08:11:43 crc kubenswrapper[4941]: I1130 08:11:43.137079 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ecee0c79-274e-4f9c-b9d4-467bc3e370a6","Type":"ContainerStarted","Data":"b754f3768a2ef8eb69d109d9b16d1869a1662f042c556220f10e0889cbb1d743"} Nov 30 08:11:43 crc kubenswrapper[4941]: I1130 08:11:43.139133 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dmdkw" event={"ID":"f9f9867d-77d1-42f5-aca3-2b84112deb56","Type":"ContainerStarted","Data":"d1ce930786b6b8bc19f10296db2aac96177843602929014e1c5e56ac8ccc2a36"} Nov 30 08:11:43 crc kubenswrapper[4941]: I1130 08:11:43.142089 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" 
event={"ID":"431c61f7-17a1-4838-8d39-140f6bdc6d48","Type":"ContainerStarted","Data":"4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0"} Nov 30 08:11:43 crc kubenswrapper[4941]: I1130 08:11:43.145478 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"247bd724-38a2-4bce-8fe7-f62b799e4f8e","Type":"ContainerStarted","Data":"fd590181835bab9c1c8b5945f946e8d413603d46fda3cb36441c24cdc4b6e040"} Nov 30 08:11:43 crc kubenswrapper[4941]: I1130 08:11:43.145658 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"247bd724-38a2-4bce-8fe7-f62b799e4f8e","Type":"ContainerStarted","Data":"b0ff277bdfac8cdfb24dc35693381df167a2a08c0c0ddb9dde9ee8803e38cc43"} Nov 30 08:11:43 crc kubenswrapper[4941]: I1130 08:11:43.168851 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.27674755 podStartE2EDuration="7.168826364s" podCreationTimestamp="2025-11-30 08:11:36 +0000 UTC" firstStartedPulling="2025-11-30 08:11:38.055749688 +0000 UTC m=+5118.823921297" lastFinishedPulling="2025-11-30 08:11:41.947828502 +0000 UTC m=+5122.716000111" observedRunningTime="2025-11-30 08:11:43.162933101 +0000 UTC m=+5123.931104720" watchObservedRunningTime="2025-11-30 08:11:43.168826364 +0000 UTC m=+5123.936997983" Nov 30 08:11:43 crc kubenswrapper[4941]: I1130 08:11:43.206155 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-dmdkw" podStartSLOduration=6.206112355 podStartE2EDuration="6.206112355s" podCreationTimestamp="2025-11-30 08:11:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:11:43.191063331 +0000 UTC m=+5123.959234960" watchObservedRunningTime="2025-11-30 08:11:43.206112355 +0000 UTC m=+5123.974283964" Nov 30 08:11:43 crc kubenswrapper[4941]: I1130 08:11:43.223122 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.6798470869999997 podStartE2EDuration="6.22308157s" podCreationTimestamp="2025-11-30 08:11:37 +0000 UTC" firstStartedPulling="2025-11-30 08:11:38.404242198 +0000 UTC m=+5119.172413807" lastFinishedPulling="2025-11-30 08:11:41.947476681 +0000 UTC m=+5122.715648290" observedRunningTime="2025-11-30 08:11:43.212914706 +0000 UTC m=+5123.981086325" watchObservedRunningTime="2025-11-30 08:11:43.22308157 +0000 UTC m=+5123.991253199" Nov 30 08:11:43 crc kubenswrapper[4941]: I1130 08:11:43.256180 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.402730624 podStartE2EDuration="6.256146522s" podCreationTimestamp="2025-11-30 08:11:37 +0000 UTC" firstStartedPulling="2025-11-30 08:11:38.096684584 +0000 UTC m=+5118.864856193" lastFinishedPulling="2025-11-30 08:11:41.950100482 +0000 UTC m=+5122.718272091" observedRunningTime="2025-11-30 08:11:43.239758545 +0000 UTC m=+5124.007930164" watchObservedRunningTime="2025-11-30 08:11:43.256146522 +0000 UTC m=+5124.024318131" Nov 30 08:11:44 crc kubenswrapper[4941]: I1130 08:11:44.157450 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a9a5e3c2-ae0c-4884-a48b-705bfda74cda","Type":"ContainerStarted","Data":"d91b3f0bed97a8e8c12e9e1e3c39e9df136efaab2861f66263721f487a5145a3"} Nov 30 08:11:44 crc kubenswrapper[4941]: I1130 08:11:44.193736 4941 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=5.704183226 podStartE2EDuration="7.193710765s" podCreationTimestamp="2025-11-30 08:11:37 +0000 UTC" firstStartedPulling="2025-11-30 08:11:41.899965303 +0000 UTC m=+5122.668136912" lastFinishedPulling="2025-11-30 08:11:43.389492832 +0000 UTC m=+5124.157664451" observedRunningTime="2025-11-30 08:11:44.184596623 +0000 UTC m=+5124.952768232" watchObservedRunningTime="2025-11-30 08:11:44.193710765 +0000 UTC m=+5124.961882374" Nov 30 08:11:44 crc kubenswrapper[4941]: I1130 08:11:44.537800 4941 scope.go:117] "RemoveContainer" containerID="bcd2d404f5852c37d654708a967b39897fc3ad4f1129d7cf67687e683be92388" Nov 30 08:11:45 crc kubenswrapper[4941]: I1130 08:11:45.176859 4941 generic.go:334] "Generic (PLEG): container finished" podID="5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2" containerID="1b37ff5150eb44ccd342e129be05564d33d508de3f8e20421fb1815bf920ae51" exitCode=0 Nov 30 08:11:45 crc kubenswrapper[4941]: I1130 08:11:45.177057 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zxbm8" event={"ID":"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2","Type":"ContainerDied","Data":"1b37ff5150eb44ccd342e129be05564d33d508de3f8e20421fb1815bf920ae51"} Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.200068 4941 generic.go:334] "Generic (PLEG): container finished" podID="f9f9867d-77d1-42f5-aca3-2b84112deb56" containerID="d1ce930786b6b8bc19f10296db2aac96177843602929014e1c5e56ac8ccc2a36" exitCode=0 Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.200184 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dmdkw" event={"ID":"f9f9867d-77d1-42f5-aca3-2b84112deb56","Type":"ContainerDied","Data":"d1ce930786b6b8bc19f10296db2aac96177843602929014e1c5e56ac8ccc2a36"} Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.687158 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zxbm8" Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.864127 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwk8z\" (UniqueName: \"kubernetes.io/projected/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-kube-api-access-xwk8z\") pod \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.864244 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-scripts\") pod \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.864284 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-config-data\") pod \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.864318 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-combined-ca-bundle\") pod \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\" (UID: \"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2\") " Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.871572 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-scripts" (OuterVolumeSpecName: "scripts") pod "5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2" (UID: "5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.873470 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-kube-api-access-xwk8z" (OuterVolumeSpecName: "kube-api-access-xwk8z") pod "5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2" (UID: "5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2"). InnerVolumeSpecName "kube-api-access-xwk8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.899485 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-config-data" (OuterVolumeSpecName: "config-data") pod "5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2" (UID: "5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.917206 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2" (UID: "5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.967133 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwk8z\" (UniqueName: \"kubernetes.io/projected/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-kube-api-access-xwk8z\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.967180 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.967194 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:46 crc kubenswrapper[4941]: I1130 08:11:46.967241 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.213798 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-zxbm8" event={"ID":"5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2","Type":"ContainerDied","Data":"385b391be1ea403e35d01097f60a067b07e6d919db7bed10cb30b71b507b022d"} Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.214269 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="385b391be1ea403e35d01097f60a067b07e6d919db7bed10cb30b71b507b022d" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.213901 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-zxbm8" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.382675 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.382726 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.450447 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.451745 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.451772 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.451787 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.478187 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.502609 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.502812 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="431c61f7-17a1-4838-8d39-140f6bdc6d48" containerName="nova-scheduler-scheduler" containerID="cri-o://4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0" gracePeriod=30 Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 
08:11:47.519652 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.660567 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.764577 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.782808 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.786278 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-combined-ca-bundle\") pod \"f9f9867d-77d1-42f5-aca3-2b84112deb56\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.786470 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-config-data\") pod \"f9f9867d-77d1-42f5-aca3-2b84112deb56\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.786528 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-scripts\") pod \"f9f9867d-77d1-42f5-aca3-2b84112deb56\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.786555 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnr4t\" (UniqueName: \"kubernetes.io/projected/f9f9867d-77d1-42f5-aca3-2b84112deb56-kube-api-access-qnr4t\") pod \"f9f9867d-77d1-42f5-aca3-2b84112deb56\" (UID: \"f9f9867d-77d1-42f5-aca3-2b84112deb56\") " Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.794036 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.795577 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.796133 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-scripts" (OuterVolumeSpecName: "scripts") pod "f9f9867d-77d1-42f5-aca3-2b84112deb56" (UID: "f9f9867d-77d1-42f5-aca3-2b84112deb56"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.816602 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9f9867d-77d1-42f5-aca3-2b84112deb56-kube-api-access-qnr4t" (OuterVolumeSpecName: "kube-api-access-qnr4t") pod "f9f9867d-77d1-42f5-aca3-2b84112deb56" (UID: "f9f9867d-77d1-42f5-aca3-2b84112deb56"). InnerVolumeSpecName "kube-api-access-qnr4t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.841368 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9f9867d-77d1-42f5-aca3-2b84112deb56" (UID: "f9f9867d-77d1-42f5-aca3-2b84112deb56"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.844275 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-config-data" (OuterVolumeSpecName: "config-data") pod "f9f9867d-77d1-42f5-aca3-2b84112deb56" (UID: "f9f9867d-77d1-42f5-aca3-2b84112deb56"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.878635 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.889917 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.889946 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.889957 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnr4t\" (UniqueName: \"kubernetes.io/projected/f9f9867d-77d1-42f5-aca3-2b84112deb56-kube-api-access-qnr4t\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.889969 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f9867d-77d1-42f5-aca3-2b84112deb56-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.893725 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c7bfb66bc-pwc2c"] Nov 30 08:11:47 crc kubenswrapper[4941]: I1130 08:11:47.894012 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" podUID="d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" containerName="dnsmasq-dns" containerID="cri-o://6cdf13dfefe4df81e69106ff8df5b157d1c3f2b706653e88321ef1f501ad14fd" gracePeriod=10 Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.246557 4941 generic.go:334] "Generic (PLEG): container finished" podID="d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" containerID="6cdf13dfefe4df81e69106ff8df5b157d1c3f2b706653e88321ef1f501ad14fd" exitCode=0 Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.246645 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" event={"ID":"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5","Type":"ContainerDied","Data":"6cdf13dfefe4df81e69106ff8df5b157d1c3f2b706653e88321ef1f501ad14fd"} Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.275624 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" containerName="nova-api-log" 
containerID="cri-o://b754f3768a2ef8eb69d109d9b16d1869a1662f042c556220f10e0889cbb1d743" gracePeriod=30 Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.276035 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dmdkw" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.284602 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dmdkw" event={"ID":"f9f9867d-77d1-42f5-aca3-2b84112deb56","Type":"ContainerDied","Data":"47d011dbf4d777aa0bfa98d5622833447717ff36be2aff7dd9e80fff51203eb1"} Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.284659 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47d011dbf4d777aa0bfa98d5622833447717ff36be2aff7dd9e80fff51203eb1" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.288277 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" containerName="nova-api-api" containerID="cri-o://53d311d948f473d507fa1005f78d9b07e86af904cbd06cd7510418e732b8da6c" gracePeriod=30 Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.312519 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.321715 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.54:8774/\": EOF" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.322094 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.54:8774/\": EOF" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.328574 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 08:11:48 crc kubenswrapper[4941]: E1130 08:11:48.329011 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9f9867d-77d1-42f5-aca3-2b84112deb56" containerName="nova-cell1-conductor-db-sync" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.329029 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9f9867d-77d1-42f5-aca3-2b84112deb56" containerName="nova-cell1-conductor-db-sync" Nov 30 08:11:48 crc kubenswrapper[4941]: E1130 08:11:48.329063 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2" containerName="nova-manage" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.329071 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2" containerName="nova-manage" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.329266 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2" containerName="nova-manage" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.329294 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9f9867d-77d1-42f5-aca3-2b84112deb56" containerName="nova-cell1-conductor-db-sync" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.330085 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.335304 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.342042 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.465567 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.506960 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chn5v\" (UniqueName: \"kubernetes.io/projected/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-kube-api-access-chn5v\") pod \"nova-cell1-conductor-0\" (UID: \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.508440 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.508607 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.564656 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.55:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.564784 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.55:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.612831 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-dns-svc\") pod \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.613272 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-ovsdbserver-sb\") pod \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.613905 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-ovsdbserver-nb\") pod \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\" (UID: 
\"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.614305 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-config\") pod \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.614473 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65bfx\" (UniqueName: \"kubernetes.io/projected/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-kube-api-access-65bfx\") pod \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\" (UID: \"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5\") " Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.615588 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chn5v\" (UniqueName: \"kubernetes.io/projected/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-kube-api-access-chn5v\") pod \"nova-cell1-conductor-0\" (UID: \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.615806 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.615897 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.632610 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-kube-api-access-65bfx" (OuterVolumeSpecName: "kube-api-access-65bfx") pod "d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" (UID: "d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5"). InnerVolumeSpecName "kube-api-access-65bfx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.636120 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.658346 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chn5v\" (UniqueName: \"kubernetes.io/projected/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-kube-api-access-chn5v\") pod \"nova-cell1-conductor-0\" (UID: \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.660366 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.680303 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.718438 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65bfx\" (UniqueName: \"kubernetes.io/projected/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-kube-api-access-65bfx\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.764359 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" (UID: "d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.796037 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" (UID: "d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.820757 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.821130 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:48 crc kubenswrapper[4941]: E1130 08:11:48.821534 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podecee0c79_274e_4f9c_b9d4_467bc3e370a6.slice/crio-conmon-b754f3768a2ef8eb69d109d9b16d1869a1662f042c556220f10e0889cbb1d743.scope\": RecentStats: unable to find data in memory cache]" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.845781 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-config" (OuterVolumeSpecName: "config") pod "d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" (UID: "d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.854235 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" (UID: "d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.924526 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:48 crc kubenswrapper[4941]: I1130 08:11:48.924568 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.250668 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.291485 4941 generic.go:334] "Generic (PLEG): container finished" podID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" containerID="b754f3768a2ef8eb69d109d9b16d1869a1662f042c556220f10e0889cbb1d743" exitCode=143 Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.291504 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ecee0c79-274e-4f9c-b9d4-467bc3e370a6","Type":"ContainerDied","Data":"b754f3768a2ef8eb69d109d9b16d1869a1662f042c556220f10e0889cbb1d743"} Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.293199 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3","Type":"ContainerStarted","Data":"8a3010ffc703425f2b86d9e6a2e780576e2ef506d2680929c301fdd5b6dc3355"} Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.295446 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" event={"ID":"d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5","Type":"ContainerDied","Data":"b27a36cab1a2f454705537eaaaa8d271098f14799eebb56858bce7cf7c0df970"} Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.295612 4941 scope.go:117] "RemoveContainer" containerID="6cdf13dfefe4df81e69106ff8df5b157d1c3f2b706653e88321ef1f501ad14fd" Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.295883 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c7bfb66bc-pwc2c" Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.295985 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" containerName="nova-metadata-log" containerID="cri-o://b0ff277bdfac8cdfb24dc35693381df167a2a08c0c0ddb9dde9ee8803e38cc43" gracePeriod=30 Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.296194 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" containerName="nova-metadata-metadata" containerID="cri-o://fd590181835bab9c1c8b5945f946e8d413603d46fda3cb36441c24cdc4b6e040" gracePeriod=30 Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.363465 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c7bfb66bc-pwc2c"] Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.372081 4941 scope.go:117] "RemoveContainer" containerID="d9e96df571dfad7f30d59f6e09eb1fb8147a1117d30ecbcf0e2b8690b6905487" Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.372463 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c7bfb66bc-pwc2c"] Nov 30 08:11:49 crc kubenswrapper[4941]: I1130 08:11:49.534640 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" path="/var/lib/kubelet/pods/d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5/volumes" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.100224 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.147548 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/431c61f7-17a1-4838-8d39-140f6bdc6d48-combined-ca-bundle\") pod \"431c61f7-17a1-4838-8d39-140f6bdc6d48\" (UID: \"431c61f7-17a1-4838-8d39-140f6bdc6d48\") " Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.147615 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdf6n\" (UniqueName: \"kubernetes.io/projected/431c61f7-17a1-4838-8d39-140f6bdc6d48-kube-api-access-kdf6n\") pod \"431c61f7-17a1-4838-8d39-140f6bdc6d48\" (UID: \"431c61f7-17a1-4838-8d39-140f6bdc6d48\") " Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.147649 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/431c61f7-17a1-4838-8d39-140f6bdc6d48-config-data\") pod \"431c61f7-17a1-4838-8d39-140f6bdc6d48\" (UID: \"431c61f7-17a1-4838-8d39-140f6bdc6d48\") " Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.153615 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/431c61f7-17a1-4838-8d39-140f6bdc6d48-kube-api-access-kdf6n" (OuterVolumeSpecName: "kube-api-access-kdf6n") pod "431c61f7-17a1-4838-8d39-140f6bdc6d48" (UID: "431c61f7-17a1-4838-8d39-140f6bdc6d48"). InnerVolumeSpecName "kube-api-access-kdf6n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.180291 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/431c61f7-17a1-4838-8d39-140f6bdc6d48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "431c61f7-17a1-4838-8d39-140f6bdc6d48" (UID: "431c61f7-17a1-4838-8d39-140f6bdc6d48"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.181046 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/431c61f7-17a1-4838-8d39-140f6bdc6d48-config-data" (OuterVolumeSpecName: "config-data") pod "431c61f7-17a1-4838-8d39-140f6bdc6d48" (UID: "431c61f7-17a1-4838-8d39-140f6bdc6d48"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.248993 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/431c61f7-17a1-4838-8d39-140f6bdc6d48-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.249028 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdf6n\" (UniqueName: \"kubernetes.io/projected/431c61f7-17a1-4838-8d39-140f6bdc6d48-kube-api-access-kdf6n\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.249038 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/431c61f7-17a1-4838-8d39-140f6bdc6d48-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.310448 4941 generic.go:334] "Generic (PLEG): container finished" podID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" containerID="b0ff277bdfac8cdfb24dc35693381df167a2a08c0c0ddb9dde9ee8803e38cc43" exitCode=143 Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.310541 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"247bd724-38a2-4bce-8fe7-f62b799e4f8e","Type":"ContainerDied","Data":"b0ff277bdfac8cdfb24dc35693381df167a2a08c0c0ddb9dde9ee8803e38cc43"} Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.312449 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3","Type":"ContainerStarted","Data":"a9b146867e7abd5c9f5b366a6d5f68317337d409e73e56c52270e501ae1475af"} Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.312564 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.321864 4941 generic.go:334] "Generic (PLEG): container finished" podID="431c61f7-17a1-4838-8d39-140f6bdc6d48" containerID="4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0" exitCode=0 Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.321915 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.321954 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"431c61f7-17a1-4838-8d39-140f6bdc6d48","Type":"ContainerDied","Data":"4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0"} Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.322040 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"431c61f7-17a1-4838-8d39-140f6bdc6d48","Type":"ContainerDied","Data":"0659d06d088eadad3ce607d17bc3a65b49a73e30dada3d291e787c130edb6c5d"} Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.322074 4941 scope.go:117] "RemoveContainer" containerID="4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.347867 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.347840599 podStartE2EDuration="2.347840599s" podCreationTimestamp="2025-11-30 08:11:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:11:50.339536503 +0000 UTC m=+5131.107708112" watchObservedRunningTime="2025-11-30 08:11:50.347840599 +0000 UTC m=+5131.116012208" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.356189 4941 scope.go:117] "RemoveContainer" containerID="4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0" Nov 30 08:11:50 crc kubenswrapper[4941]: E1130 08:11:50.356734 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0\": container with ID starting with 4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0 not found: ID does not exist" containerID="4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.356836 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0"} err="failed to get container status \"4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0\": rpc error: code = NotFound desc = could not find container \"4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0\": container with ID starting with 4dc0e9213690c754e0f80073d3f1c025f9ba66f8b3d4c3709383687e50b055a0 not found: ID does not exist" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.374410 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.392910 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.413044 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:11:50 crc kubenswrapper[4941]: E1130 08:11:50.413999 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="431c61f7-17a1-4838-8d39-140f6bdc6d48" containerName="nova-scheduler-scheduler" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.414085 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="431c61f7-17a1-4838-8d39-140f6bdc6d48" containerName="nova-scheduler-scheduler" Nov 30 08:11:50 crc kubenswrapper[4941]: E1130 
08:11:50.414169 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" containerName="init" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.414254 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" containerName="init" Nov 30 08:11:50 crc kubenswrapper[4941]: E1130 08:11:50.414348 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" containerName="dnsmasq-dns" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.414428 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" containerName="dnsmasq-dns" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.414732 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="431c61f7-17a1-4838-8d39-140f6bdc6d48" containerName="nova-scheduler-scheduler" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.414821 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6a8e9b7-b12e-4441-af86-2f73a9e7a4c5" containerName="dnsmasq-dns" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.416111 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.425349 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.425675 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.455170 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6ed5710-d0ee-4581-a612-728298aa0c00-config-data\") pod \"nova-scheduler-0\" (UID: \"c6ed5710-d0ee-4581-a612-728298aa0c00\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.455251 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6ed5710-d0ee-4581-a612-728298aa0c00-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c6ed5710-d0ee-4581-a612-728298aa0c00\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.455522 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpghh\" (UniqueName: \"kubernetes.io/projected/c6ed5710-d0ee-4581-a612-728298aa0c00-kube-api-access-dpghh\") pod \"nova-scheduler-0\" (UID: \"c6ed5710-d0ee-4581-a612-728298aa0c00\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.558766 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpghh\" (UniqueName: \"kubernetes.io/projected/c6ed5710-d0ee-4581-a612-728298aa0c00-kube-api-access-dpghh\") pod \"nova-scheduler-0\" (UID: \"c6ed5710-d0ee-4581-a612-728298aa0c00\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.558863 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6ed5710-d0ee-4581-a612-728298aa0c00-config-data\") pod \"nova-scheduler-0\" (UID: \"c6ed5710-d0ee-4581-a612-728298aa0c00\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:50 crc 
kubenswrapper[4941]: I1130 08:11:50.558923 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6ed5710-d0ee-4581-a612-728298aa0c00-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c6ed5710-d0ee-4581-a612-728298aa0c00\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.567419 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6ed5710-d0ee-4581-a612-728298aa0c00-config-data\") pod \"nova-scheduler-0\" (UID: \"c6ed5710-d0ee-4581-a612-728298aa0c00\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.569106 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6ed5710-d0ee-4581-a612-728298aa0c00-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c6ed5710-d0ee-4581-a612-728298aa0c00\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.580218 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpghh\" (UniqueName: \"kubernetes.io/projected/c6ed5710-d0ee-4581-a612-728298aa0c00-kube-api-access-dpghh\") pod \"nova-scheduler-0\" (UID: \"c6ed5710-d0ee-4581-a612-728298aa0c00\") " pod="openstack/nova-scheduler-0" Nov 30 08:11:50 crc kubenswrapper[4941]: I1130 08:11:50.750154 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 08:11:51 crc kubenswrapper[4941]: I1130 08:11:51.244196 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:11:51 crc kubenswrapper[4941]: W1130 08:11:51.246063 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6ed5710_d0ee_4581_a612_728298aa0c00.slice/crio-e6e2d1b38cc68965024e0231ec974f51b5819e9b0478d5e714bf5b8bd43087f6 WatchSource:0}: Error finding container e6e2d1b38cc68965024e0231ec974f51b5819e9b0478d5e714bf5b8bd43087f6: Status 404 returned error can't find the container with id e6e2d1b38cc68965024e0231ec974f51b5819e9b0478d5e714bf5b8bd43087f6 Nov 30 08:11:51 crc kubenswrapper[4941]: I1130 08:11:51.347966 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c6ed5710-d0ee-4581-a612-728298aa0c00","Type":"ContainerStarted","Data":"e6e2d1b38cc68965024e0231ec974f51b5819e9b0478d5e714bf5b8bd43087f6"} Nov 30 08:11:51 crc kubenswrapper[4941]: I1130 08:11:51.545850 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="431c61f7-17a1-4838-8d39-140f6bdc6d48" path="/var/lib/kubelet/pods/431c61f7-17a1-4838-8d39-140f6bdc6d48/volumes" Nov 30 08:11:52 crc kubenswrapper[4941]: I1130 08:11:52.373130 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c6ed5710-d0ee-4581-a612-728298aa0c00","Type":"ContainerStarted","Data":"5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08"} Nov 30 08:11:52 crc kubenswrapper[4941]: I1130 08:11:52.396678 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.396652267 podStartE2EDuration="2.396652267s" podCreationTimestamp="2025-11-30 08:11:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-11-30 08:11:52.390690392 +0000 UTC m=+5133.158862001" watchObservedRunningTime="2025-11-30 08:11:52.396652267 +0000 UTC m=+5133.164823876" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.403645 4941 generic.go:334] "Generic (PLEG): container finished" podID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" containerID="fd590181835bab9c1c8b5945f946e8d413603d46fda3cb36441c24cdc4b6e040" exitCode=0 Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.404432 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"247bd724-38a2-4bce-8fe7-f62b799e4f8e","Type":"ContainerDied","Data":"fd590181835bab9c1c8b5945f946e8d413603d46fda3cb36441c24cdc4b6e040"} Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.404462 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"247bd724-38a2-4bce-8fe7-f62b799e4f8e","Type":"ContainerDied","Data":"c87160a7a93a448dcf5cd01576659ee3b71c144a2e1c78813b68d003d2ffeb12"} Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.404473 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c87160a7a93a448dcf5cd01576659ee3b71c144a2e1c78813b68d003d2ffeb12" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.407200 4941 generic.go:334] "Generic (PLEG): container finished" podID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" containerID="53d311d948f473d507fa1005f78d9b07e86af904cbd06cd7510418e732b8da6c" exitCode=0 Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.407247 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ecee0c79-274e-4f9c-b9d4-467bc3e370a6","Type":"ContainerDied","Data":"53d311d948f473d507fa1005f78d9b07e86af904cbd06cd7510418e732b8da6c"} Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.407315 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ecee0c79-274e-4f9c-b9d4-467bc3e370a6","Type":"ContainerDied","Data":"edec7823604001ea98f64913d59772c170ade8d9876ce373c327e3c66cbf02d0"} Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.407345 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="edec7823604001ea98f64913d59772c170ade8d9876ce373c327e3c66cbf02d0" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.447961 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.453924 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.473876 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-combined-ca-bundle\") pod \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.473924 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247bd724-38a2-4bce-8fe7-f62b799e4f8e-combined-ca-bundle\") pod \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.473953 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mtc8\" (UniqueName: \"kubernetes.io/projected/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-kube-api-access-5mtc8\") pod \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.473996 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-config-data\") pod \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.474032 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-logs\") pod \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\" (UID: \"ecee0c79-274e-4f9c-b9d4-467bc3e370a6\") " Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.474055 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glfkj\" (UniqueName: \"kubernetes.io/projected/247bd724-38a2-4bce-8fe7-f62b799e4f8e-kube-api-access-glfkj\") pod \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.474077 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/247bd724-38a2-4bce-8fe7-f62b799e4f8e-logs\") pod \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.474107 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247bd724-38a2-4bce-8fe7-f62b799e4f8e-config-data\") pod \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\" (UID: \"247bd724-38a2-4bce-8fe7-f62b799e4f8e\") " Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.478170 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/247bd724-38a2-4bce-8fe7-f62b799e4f8e-logs" (OuterVolumeSpecName: "logs") pod "247bd724-38a2-4bce-8fe7-f62b799e4f8e" (UID: "247bd724-38a2-4bce-8fe7-f62b799e4f8e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.478610 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-logs" (OuterVolumeSpecName: "logs") pod "ecee0c79-274e-4f9c-b9d4-467bc3e370a6" (UID: "ecee0c79-274e-4f9c-b9d4-467bc3e370a6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.482732 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-kube-api-access-5mtc8" (OuterVolumeSpecName: "kube-api-access-5mtc8") pod "ecee0c79-274e-4f9c-b9d4-467bc3e370a6" (UID: "ecee0c79-274e-4f9c-b9d4-467bc3e370a6"). InnerVolumeSpecName "kube-api-access-5mtc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.489097 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/247bd724-38a2-4bce-8fe7-f62b799e4f8e-kube-api-access-glfkj" (OuterVolumeSpecName: "kube-api-access-glfkj") pod "247bd724-38a2-4bce-8fe7-f62b799e4f8e" (UID: "247bd724-38a2-4bce-8fe7-f62b799e4f8e"). InnerVolumeSpecName "kube-api-access-glfkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.534221 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ecee0c79-274e-4f9c-b9d4-467bc3e370a6" (UID: "ecee0c79-274e-4f9c-b9d4-467bc3e370a6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.539337 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247bd724-38a2-4bce-8fe7-f62b799e4f8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "247bd724-38a2-4bce-8fe7-f62b799e4f8e" (UID: "247bd724-38a2-4bce-8fe7-f62b799e4f8e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.542733 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/247bd724-38a2-4bce-8fe7-f62b799e4f8e-config-data" (OuterVolumeSpecName: "config-data") pod "247bd724-38a2-4bce-8fe7-f62b799e4f8e" (UID: "247bd724-38a2-4bce-8fe7-f62b799e4f8e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.544741 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-config-data" (OuterVolumeSpecName: "config-data") pod "ecee0c79-274e-4f9c-b9d4-467bc3e370a6" (UID: "ecee0c79-274e-4f9c-b9d4-467bc3e370a6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.578972 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.579374 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/247bd724-38a2-4bce-8fe7-f62b799e4f8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.579461 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mtc8\" (UniqueName: \"kubernetes.io/projected/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-kube-api-access-5mtc8\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.579572 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.579688 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ecee0c79-274e-4f9c-b9d4-467bc3e370a6-logs\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.579777 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glfkj\" (UniqueName: \"kubernetes.io/projected/247bd724-38a2-4bce-8fe7-f62b799e4f8e-kube-api-access-glfkj\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.579847 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/247bd724-38a2-4bce-8fe7-f62b799e4f8e-logs\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:54 crc kubenswrapper[4941]: I1130 08:11:54.579915 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/247bd724-38a2-4bce-8fe7-f62b799e4f8e-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.421267 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.421402 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.493666 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.558022 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.585603 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.605761 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.624424 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 30 08:11:55 crc kubenswrapper[4941]: E1130 08:11:55.624979 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" containerName="nova-metadata-log" Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.625004 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" containerName="nova-metadata-log" Nov 30 08:11:55 crc kubenswrapper[4941]: E1130 08:11:55.625033 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" containerName="nova-api-api" Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.625042 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" containerName="nova-api-api" Nov 30 08:11:55 crc kubenswrapper[4941]: E1130 08:11:55.625108 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" containerName="nova-metadata-metadata" Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.625117 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" containerName="nova-metadata-metadata" Nov 30 08:11:55 crc kubenswrapper[4941]: E1130 08:11:55.625136 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" containerName="nova-api-log" Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.625144 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" containerName="nova-api-log" Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.625366 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" containerName="nova-api-log" Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.625390 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" containerName="nova-metadata-metadata" Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.625415 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" containerName="nova-api-api" Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.625434 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" containerName="nova-metadata-log" Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.626931 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.629148 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.632885 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.644394 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.646841 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.650110 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.656106 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.751540 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.809142 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-logs\") pod \"nova-api-0\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") " pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.809199 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") " pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.809585 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lntw\" (UniqueName: \"kubernetes.io/projected/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-kube-api-access-2lntw\") pod \"nova-api-0\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") " pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.809721 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nscff\" (UniqueName: \"kubernetes.io/projected/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-kube-api-access-nscff\") pod \"nova-metadata-0\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") " pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.810156 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-logs\") pod \"nova-metadata-0\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") " pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.810379 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") " pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.810711 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-config-data\") pod \"nova-api-0\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") " pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.810805 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-config-data\") pod \"nova-metadata-0\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") " pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.912670 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-logs\") pod \"nova-metadata-0\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") " pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.912741 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") " pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.912779 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-config-data\") pod \"nova-api-0\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") " pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.912824 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-config-data\") pod \"nova-metadata-0\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") " pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.913271 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-logs\") pod \"nova-metadata-0\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") " pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.913950 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-logs\") pod \"nova-api-0\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") " pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.914024 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") " pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.914077 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lntw\" (UniqueName: \"kubernetes.io/projected/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-kube-api-access-2lntw\") pod \"nova-api-0\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") " pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.914105 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nscff\" (UniqueName: \"kubernetes.io/projected/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-kube-api-access-nscff\") pod \"nova-metadata-0\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") " pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.914233 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-logs\") pod \"nova-api-0\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") " pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.920589 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-config-data\") pod \"nova-metadata-0\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") " pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.920905 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-config-data\") pod \"nova-api-0\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") " pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.920953 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") " pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.923771 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") " pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.933101 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lntw\" (UniqueName: \"kubernetes.io/projected/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-kube-api-access-2lntw\") pod \"nova-api-0\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") " pod="openstack/nova-api-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.933372 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nscff\" (UniqueName: \"kubernetes.io/projected/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-kube-api-access-nscff\") pod \"nova-metadata-0\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") " pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.949113 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 30 08:11:55 crc kubenswrapper[4941]: I1130 08:11:55.966046 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 30 08:11:56 crc kubenswrapper[4941]: I1130 08:11:56.475434 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 30 08:11:56 crc kubenswrapper[4941]: W1130 08:11:56.479897 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65cf43e9_3dd6_4682_a582_d9a70c9f24fa.slice/crio-e017068f469a8d0b64917c9684f3aeac990d062ae7f610379e13b5171a562886 WatchSource:0}: Error finding container e017068f469a8d0b64917c9684f3aeac990d062ae7f610379e13b5171a562886: Status 404 returned error can't find the container with id e017068f469a8d0b64917c9684f3aeac990d062ae7f610379e13b5171a562886
Nov 30 08:11:56 crc kubenswrapper[4941]: I1130 08:11:56.546152 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 08:11:56 crc kubenswrapper[4941]: W1130 08:11:56.562471 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb72c8b68_9e59_4da1_9ba3_a7f7bd30c3a5.slice/crio-fc1616e6fe4b8db76ca4e9e7d155aae448985af1cc4ece50e5343b8f53b749e7 WatchSource:0}: Error finding container fc1616e6fe4b8db76ca4e9e7d155aae448985af1cc4ece50e5343b8f53b749e7: Status 404 returned error can't find the container with id fc1616e6fe4b8db76ca4e9e7d155aae448985af1cc4ece50e5343b8f53b749e7
Nov 30 08:11:57 crc kubenswrapper[4941]: I1130 08:11:57.455400 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5","Type":"ContainerStarted","Data":"d3abd67f4c99dedad0df851c6fc020c8c6b6e993232e77e5726a1fd96c8c770f"}
Nov 30 08:11:57 crc kubenswrapper[4941]: I1130 08:11:57.455913 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5","Type":"ContainerStarted","Data":"5fc2b12e31d0390dc9ebe2903453a5e135f7df5ef9e22a1ed9f41ece7cb1ac46"}
Nov 30 08:11:57 crc kubenswrapper[4941]: I1130 08:11:57.455925 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5","Type":"ContainerStarted","Data":"fc1616e6fe4b8db76ca4e9e7d155aae448985af1cc4ece50e5343b8f53b749e7"}
Nov 30 08:11:57 crc kubenswrapper[4941]: I1130 08:11:57.460116 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65cf43e9-3dd6-4682-a582-d9a70c9f24fa","Type":"ContainerStarted","Data":"4b51fb3d0c63b396f0320f8c5023723d2279c51d66360b416725320bbddc6498"}
Nov 30 08:11:57 crc kubenswrapper[4941]: I1130 08:11:57.460172 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65cf43e9-3dd6-4682-a582-d9a70c9f24fa","Type":"ContainerStarted","Data":"fd718bf4aec1ea824253675cf43e15327b404db3e54014a014383facec7108c0"}
Nov 30 08:11:57 crc kubenswrapper[4941]: I1130 08:11:57.460185 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65cf43e9-3dd6-4682-a582-d9a70c9f24fa","Type":"ContainerStarted","Data":"e017068f469a8d0b64917c9684f3aeac990d062ae7f610379e13b5171a562886"}
Nov 30 08:11:57 crc kubenswrapper[4941]: I1130 08:11:57.481447 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.481413352 podStartE2EDuration="2.481413352s" podCreationTimestamp="2025-11-30 08:11:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:11:57.477689698 +0000 UTC m=+5138.245861347" watchObservedRunningTime="2025-11-30 08:11:57.481413352 +0000 UTC m=+5138.249584991"
Nov 30 08:11:57 crc kubenswrapper[4941]: I1130 08:11:57.508413 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.508299555 podStartE2EDuration="2.508299555s" podCreationTimestamp="2025-11-30 08:11:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:11:57.505651423 +0000 UTC m=+5138.273823032" watchObservedRunningTime="2025-11-30 08:11:57.508299555 +0000 UTC m=+5138.276471184"
Nov 30 08:11:57 crc kubenswrapper[4941]: I1130 08:11:57.535217 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="247bd724-38a2-4bce-8fe7-f62b799e4f8e" path="/var/lib/kubelet/pods/247bd724-38a2-4bce-8fe7-f62b799e4f8e/volumes"
Nov 30 08:11:57 crc kubenswrapper[4941]: I1130 08:11:57.536200 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecee0c79-274e-4f9c-b9d4-467bc3e370a6" path="/var/lib/kubelet/pods/ecee0c79-274e-4f9c-b9d4-467bc3e370a6/volumes"
Nov 30 08:11:58 crc kubenswrapper[4941]: I1130 08:11:58.718534 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.346574 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-dfwtd"]
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.348350 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.353865 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.354075 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.360902 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-dfwtd"]
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.497775 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-scripts\") pod \"nova-cell1-cell-mapping-dfwtd\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") " pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.497875 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dfwtd\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") " pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.497897 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjhm7\" (UniqueName: \"kubernetes.io/projected/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-kube-api-access-rjhm7\") pod \"nova-cell1-cell-mapping-dfwtd\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") " pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.497961 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-config-data\") pod \"nova-cell1-cell-mapping-dfwtd\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") " pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.599878 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-scripts\") pod \"nova-cell1-cell-mapping-dfwtd\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") " pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.599993 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dfwtd\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") " pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.600016 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjhm7\" (UniqueName: \"kubernetes.io/projected/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-kube-api-access-rjhm7\") pod \"nova-cell1-cell-mapping-dfwtd\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") " pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.600085 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-config-data\") pod \"nova-cell1-cell-mapping-dfwtd\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") " pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.610220 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-scripts\") pod \"nova-cell1-cell-mapping-dfwtd\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") " pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.610268 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-config-data\") pod \"nova-cell1-cell-mapping-dfwtd\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") " pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.617484 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dfwtd\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") " pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.617493 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjhm7\" (UniqueName: \"kubernetes.io/projected/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-kube-api-access-rjhm7\") pod \"nova-cell1-cell-mapping-dfwtd\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") " pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:11:59 crc kubenswrapper[4941]: I1130 08:11:59.677912 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:12:00 crc kubenswrapper[4941]: W1130 08:12:00.146794 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c6318bb_a4f0_4d55_90f3_8b44ac03b110.slice/crio-49fcadc9f0c45243fadc94d54615b0e4f77b1a2162f512c4c61645636aff2cfd WatchSource:0}: Error finding container 49fcadc9f0c45243fadc94d54615b0e4f77b1a2162f512c4c61645636aff2cfd: Status 404 returned error can't find the container with id 49fcadc9f0c45243fadc94d54615b0e4f77b1a2162f512c4c61645636aff2cfd
Nov 30 08:12:00 crc kubenswrapper[4941]: I1130 08:12:00.146840 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-dfwtd"]
Nov 30 08:12:00 crc kubenswrapper[4941]: I1130 08:12:00.504454 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dfwtd" event={"ID":"9c6318bb-a4f0-4d55-90f3-8b44ac03b110","Type":"ContainerStarted","Data":"833b447cc45962121e5bfd11ecde96d19308c294322f4909cbf5452ab2cbcd43"}
Nov 30 08:12:00 crc kubenswrapper[4941]: I1130 08:12:00.504849 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dfwtd" event={"ID":"9c6318bb-a4f0-4d55-90f3-8b44ac03b110","Type":"ContainerStarted","Data":"49fcadc9f0c45243fadc94d54615b0e4f77b1a2162f512c4c61645636aff2cfd"}
Nov 30 08:12:00 crc kubenswrapper[4941]: I1130 08:12:00.523389 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-dfwtd" podStartSLOduration=1.523372953 podStartE2EDuration="1.523372953s" podCreationTimestamp="2025-11-30 08:11:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:12:00.5197494 +0000 UTC m=+5141.287921009" watchObservedRunningTime="2025-11-30 08:12:00.523372953 +0000 UTC m=+5141.291544562"
Nov 30 08:12:00 crc kubenswrapper[4941]: I1130 08:12:00.751184 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Nov 30 08:12:00 crc kubenswrapper[4941]: I1130 08:12:00.799411 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Nov 30 08:12:00 crc kubenswrapper[4941]: I1130 08:12:00.949469 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Nov 30 08:12:00 crc kubenswrapper[4941]: I1130 08:12:00.949882 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Nov 30 08:12:01 crc kubenswrapper[4941]: I1130 08:12:01.577530 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Nov 30 08:12:05 crc kubenswrapper[4941]: I1130 08:12:05.583967 4941 generic.go:334] "Generic (PLEG): container finished" podID="9c6318bb-a4f0-4d55-90f3-8b44ac03b110" containerID="833b447cc45962121e5bfd11ecde96d19308c294322f4909cbf5452ab2cbcd43" exitCode=0
Nov 30 08:12:05 crc kubenswrapper[4941]: I1130 08:12:05.584120 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dfwtd" event={"ID":"9c6318bb-a4f0-4d55-90f3-8b44ac03b110","Type":"ContainerDied","Data":"833b447cc45962121e5bfd11ecde96d19308c294322f4909cbf5452ab2cbcd43"}
Nov 30 08:12:05 crc kubenswrapper[4941]: I1130 08:12:05.950111 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Nov 30 08:12:05 crc kubenswrapper[4941]: I1130 08:12:05.950227 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Nov 30 08:12:05 crc kubenswrapper[4941]: I1130 08:12:05.966424 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Nov 30 08:12:05 crc kubenswrapper[4941]: I1130 08:12:05.966516 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Nov 30 08:12:06 crc kubenswrapper[4941]: I1130 08:12:06.984445 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.103532 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-config-data\") pod \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") "
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.103668 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-scripts\") pod \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") "
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.103713 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-combined-ca-bundle\") pod \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") "
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.103766 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjhm7\" (UniqueName: \"kubernetes.io/projected/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-kube-api-access-rjhm7\") pod \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\" (UID: \"9c6318bb-a4f0-4d55-90f3-8b44ac03b110\") "
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.115829 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.62:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.115882 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-kube-api-access-rjhm7" (OuterVolumeSpecName: "kube-api-access-rjhm7") pod "9c6318bb-a4f0-4d55-90f3-8b44ac03b110" (UID: "9c6318bb-a4f0-4d55-90f3-8b44ac03b110"). InnerVolumeSpecName "kube-api-access-rjhm7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.115924 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.63:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.116013 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.62:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.116034 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.63:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.116780 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-scripts" (OuterVolumeSpecName: "scripts") pod "9c6318bb-a4f0-4d55-90f3-8b44ac03b110" (UID: "9c6318bb-a4f0-4d55-90f3-8b44ac03b110"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.146667 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-config-data" (OuterVolumeSpecName: "config-data") pod "9c6318bb-a4f0-4d55-90f3-8b44ac03b110" (UID: "9c6318bb-a4f0-4d55-90f3-8b44ac03b110"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.146797 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c6318bb-a4f0-4d55-90f3-8b44ac03b110" (UID: "9c6318bb-a4f0-4d55-90f3-8b44ac03b110"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.206895 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-config-data\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.206942 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-scripts\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.206954 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.206968 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjhm7\" (UniqueName: \"kubernetes.io/projected/9c6318bb-a4f0-4d55-90f3-8b44ac03b110-kube-api-access-rjhm7\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.613150 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dfwtd" event={"ID":"9c6318bb-a4f0-4d55-90f3-8b44ac03b110","Type":"ContainerDied","Data":"49fcadc9f0c45243fadc94d54615b0e4f77b1a2162f512c4c61645636aff2cfd"}
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.613674 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49fcadc9f0c45243fadc94d54615b0e4f77b1a2162f512c4c61645636aff2cfd"
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.613224 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dfwtd"
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.805475 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.805732 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" containerName="nova-api-log" containerID="cri-o://fd718bf4aec1ea824253675cf43e15327b404db3e54014a014383facec7108c0" gracePeriod=30
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.805891 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" containerName="nova-api-api" containerID="cri-o://4b51fb3d0c63b396f0320f8c5023723d2279c51d66360b416725320bbddc6498" gracePeriod=30
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.849629 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.850144 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" containerName="nova-metadata-log" containerID="cri-o://5fc2b12e31d0390dc9ebe2903453a5e135f7df5ef9e22a1ed9f41ece7cb1ac46" gracePeriod=30
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.850389 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" containerName="nova-metadata-metadata" containerID="cri-o://d3abd67f4c99dedad0df851c6fc020c8c6b6e993232e77e5726a1fd96c8c770f" gracePeriod=30
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.865477 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 30 08:12:07 crc kubenswrapper[4941]: I1130 08:12:07.865732 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c6ed5710-d0ee-4581-a612-728298aa0c00" containerName="nova-scheduler-scheduler" containerID="cri-o://5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08" gracePeriod=30
Nov 30 08:12:08 crc kubenswrapper[4941]: I1130 08:12:08.625797 4941 generic.go:334] "Generic (PLEG): container finished" podID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" containerID="fd718bf4aec1ea824253675cf43e15327b404db3e54014a014383facec7108c0" exitCode=143
Nov 30 08:12:08 crc kubenswrapper[4941]: I1130 08:12:08.625914 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65cf43e9-3dd6-4682-a582-d9a70c9f24fa","Type":"ContainerDied","Data":"fd718bf4aec1ea824253675cf43e15327b404db3e54014a014383facec7108c0"}
Nov 30 08:12:08 crc kubenswrapper[4941]: I1130 08:12:08.629276 4941 generic.go:334] "Generic (PLEG): container finished" podID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" containerID="5fc2b12e31d0390dc9ebe2903453a5e135f7df5ef9e22a1ed9f41ece7cb1ac46" exitCode=143
Nov 30 08:12:08 crc kubenswrapper[4941]: I1130 08:12:08.629337 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5","Type":"ContainerDied","Data":"5fc2b12e31d0390dc9ebe2903453a5e135f7df5ef9e22a1ed9f41ece7cb1ac46"}
Nov 30 08:12:10 crc kubenswrapper[4941]: E1130 08:12:10.754546 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Nov 30 08:12:10 crc kubenswrapper[4941]: E1130 08:12:10.757300 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Nov 30 08:12:10 crc kubenswrapper[4941]: E1130 08:12:10.759179 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Nov 30 08:12:10 crc kubenswrapper[4941]: E1130 08:12:10.759245 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="c6ed5710-d0ee-4581-a612-728298aa0c00" containerName="nova-scheduler-scheduler"
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.680536 4941 generic.go:334] "Generic (PLEG): container finished" podID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" containerID="d3abd67f4c99dedad0df851c6fc020c8c6b6e993232e77e5726a1fd96c8c770f" exitCode=0
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.680623 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5","Type":"ContainerDied","Data":"d3abd67f4c99dedad0df851c6fc020c8c6b6e993232e77e5726a1fd96c8c770f"}
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.684133 4941 generic.go:334] "Generic (PLEG): container finished" podID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" containerID="4b51fb3d0c63b396f0320f8c5023723d2279c51d66360b416725320bbddc6498" exitCode=0
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.684180 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65cf43e9-3dd6-4682-a582-d9a70c9f24fa","Type":"ContainerDied","Data":"4b51fb3d0c63b396f0320f8c5023723d2279c51d66360b416725320bbddc6498"}
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.920175 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.929015 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.960930 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nscff\" (UniqueName: \"kubernetes.io/projected/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-kube-api-access-nscff\") pod \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") "
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.961004 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-logs\") pod \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") "
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.961096 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-config-data\") pod \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") "
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.961241 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-config-data\") pod \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") "
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.961301 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lntw\" (UniqueName: \"kubernetes.io/projected/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-kube-api-access-2lntw\") pod \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") "
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.961361 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-logs\") pod \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") "
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.961467 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-combined-ca-bundle\") pod \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\" (UID: \"65cf43e9-3dd6-4682-a582-d9a70c9f24fa\") "
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.961599 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-combined-ca-bundle\") pod \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\" (UID: \"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5\") "
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.964842 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-logs" (OuterVolumeSpecName: "logs") pod "65cf43e9-3dd6-4682-a582-d9a70c9f24fa" (UID: "65cf43e9-3dd6-4682-a582-d9a70c9f24fa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.965130 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-logs" (OuterVolumeSpecName: "logs") pod "b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" (UID: "b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.968808 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-kube-api-access-2lntw" (OuterVolumeSpecName: "kube-api-access-2lntw") pod "65cf43e9-3dd6-4682-a582-d9a70c9f24fa" (UID: "65cf43e9-3dd6-4682-a582-d9a70c9f24fa"). InnerVolumeSpecName "kube-api-access-2lntw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:12:12 crc kubenswrapper[4941]: I1130 08:12:12.970683 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-kube-api-access-nscff" (OuterVolumeSpecName: "kube-api-access-nscff") pod "b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" (UID: "b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5"). InnerVolumeSpecName "kube-api-access-nscff". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.004575 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-config-data" (OuterVolumeSpecName: "config-data") pod "65cf43e9-3dd6-4682-a582-d9a70c9f24fa" (UID: "65cf43e9-3dd6-4682-a582-d9a70c9f24fa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.012644 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" (UID: "b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.015398 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-config-data" (OuterVolumeSpecName: "config-data") pod "b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" (UID: "b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.016001 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65cf43e9-3dd6-4682-a582-d9a70c9f24fa" (UID: "65cf43e9-3dd6-4682-a582-d9a70c9f24fa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.069248 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-config-data\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.069412 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lntw\" (UniqueName: \"kubernetes.io/projected/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-kube-api-access-2lntw\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.069552 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-logs\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.069609 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cf43e9-3dd6-4682-a582-d9a70c9f24fa-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.069665 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.069719 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nscff\" (UniqueName: \"kubernetes.io/projected/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-kube-api-access-nscff\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.069767 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-logs\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.069814 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5-config-data\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.253686 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.272804 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6ed5710-d0ee-4581-a612-728298aa0c00-combined-ca-bundle\") pod \"c6ed5710-d0ee-4581-a612-728298aa0c00\" (UID: \"c6ed5710-d0ee-4581-a612-728298aa0c00\") "
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.273098 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpghh\" (UniqueName: \"kubernetes.io/projected/c6ed5710-d0ee-4581-a612-728298aa0c00-kube-api-access-dpghh\") pod \"c6ed5710-d0ee-4581-a612-728298aa0c00\" (UID: \"c6ed5710-d0ee-4581-a612-728298aa0c00\") "
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.273147 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6ed5710-d0ee-4581-a612-728298aa0c00-config-data\") pod \"c6ed5710-d0ee-4581-a612-728298aa0c00\" (UID: \"c6ed5710-d0ee-4581-a612-728298aa0c00\") "
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.285865 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6ed5710-d0ee-4581-a612-728298aa0c00-kube-api-access-dpghh" (OuterVolumeSpecName: "kube-api-access-dpghh") pod "c6ed5710-d0ee-4581-a612-728298aa0c00" (UID: "c6ed5710-d0ee-4581-a612-728298aa0c00"). InnerVolumeSpecName "kube-api-access-dpghh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.299230 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6ed5710-d0ee-4581-a612-728298aa0c00-config-data" (OuterVolumeSpecName: "config-data") pod "c6ed5710-d0ee-4581-a612-728298aa0c00" (UID: "c6ed5710-d0ee-4581-a612-728298aa0c00"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.310552 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6ed5710-d0ee-4581-a612-728298aa0c00-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6ed5710-d0ee-4581-a612-728298aa0c00" (UID: "c6ed5710-d0ee-4581-a612-728298aa0c00"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.376066 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6ed5710-d0ee-4581-a612-728298aa0c00-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.376110 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpghh\" (UniqueName: \"kubernetes.io/projected/c6ed5710-d0ee-4581-a612-728298aa0c00-kube-api-access-dpghh\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.376126 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6ed5710-d0ee-4581-a612-728298aa0c00-config-data\") on node \"crc\" DevicePath \"\""
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.701236 4941 generic.go:334] "Generic (PLEG): container finished" podID="c6ed5710-d0ee-4581-a612-728298aa0c00" containerID="5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08" exitCode=0
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.701792 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c6ed5710-d0ee-4581-a612-728298aa0c00","Type":"ContainerDied","Data":"5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08"}
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.701830 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c6ed5710-d0ee-4581-a612-728298aa0c00","Type":"ContainerDied","Data":"e6e2d1b38cc68965024e0231ec974f51b5819e9b0478d5e714bf5b8bd43087f6"}
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.701854 4941 scope.go:117] "RemoveContainer" containerID="5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.702074 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.707161 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"65cf43e9-3dd6-4682-a582-d9a70c9f24fa","Type":"ContainerDied","Data":"e017068f469a8d0b64917c9684f3aeac990d062ae7f610379e13b5171a562886"}
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.707245 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.714412 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5","Type":"ContainerDied","Data":"fc1616e6fe4b8db76ca4e9e7d155aae448985af1cc4ece50e5343b8f53b749e7"}
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.714539 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.732668 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.742419 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.753589 4941 scope.go:117] "RemoveContainer" containerID="5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08"
Nov 30 08:12:13 crc kubenswrapper[4941]: E1130 08:12:13.754903 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08\": container with ID starting with 5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08 not found: ID does not exist" containerID="5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.754940 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08"} err="failed to get container status \"5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08\": rpc error: code = NotFound desc = could not find container \"5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08\": container with ID starting with 5a3e653dcbf4f47c6a3ee3139538782544765bf78de29681a5a7db9d6f2bde08 not found: ID does not exist"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.754960 4941 scope.go:117] "RemoveContainer" containerID="4b51fb3d0c63b396f0320f8c5023723d2279c51d66360b416725320bbddc6498"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.763109 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.774678 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.785684 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Nov 30 08:12:13 crc kubenswrapper[4941]: E1130 08:12:13.786435 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" containerName="nova-api-log"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.786557 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" containerName="nova-api-log"
Nov 30 08:12:13 crc kubenswrapper[4941]: E1130 08:12:13.786642 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" containerName="nova-metadata-log"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.786720 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" containerName="nova-metadata-log"
Nov 30 08:12:13 crc kubenswrapper[4941]: E1130 08:12:13.786834 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" containerName="nova-api-api"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.786895 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" containerName="nova-api-api"
Nov 30 08:12:13 crc kubenswrapper[4941]: E1130 08:12:13.786957 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" containerName="nova-metadata-metadata"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.787014 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" containerName="nova-metadata-metadata"
Nov 30 08:12:13 crc kubenswrapper[4941]: E1130 08:12:13.787106 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6ed5710-d0ee-4581-a612-728298aa0c00" containerName="nova-scheduler-scheduler"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.787178 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6ed5710-d0ee-4581-a612-728298aa0c00" containerName="nova-scheduler-scheduler"
Nov 30 08:12:13 crc kubenswrapper[4941]: E1130 08:12:13.787253 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c6318bb-a4f0-4d55-90f3-8b44ac03b110" containerName="nova-manage"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.787341 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c6318bb-a4f0-4d55-90f3-8b44ac03b110" containerName="nova-manage"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.787664 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" containerName="nova-api-api"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.787750 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6ed5710-d0ee-4581-a612-728298aa0c00" containerName="nova-scheduler-scheduler"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.787800 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" containerName="nova-metadata-metadata"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.787870 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" containerName="nova-metadata-log"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.787937 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" containerName="nova-api-log"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.787991 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c6318bb-a4f0-4d55-90f3-8b44ac03b110" containerName="nova-manage"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.788913 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.794132 4941 scope.go:117] "RemoveContainer" containerID="fd718bf4aec1ea824253675cf43e15327b404db3e54014a014383facec7108c0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.794824 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.796265 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.804916 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.815390 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.817165 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.822525 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.828309 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.840019 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.847032 4941 scope.go:117] "RemoveContainer" containerID="d3abd67f4c99dedad0df851c6fc020c8c6b6e993232e77e5726a1fd96c8c770f"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.857873 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.859722 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.865822 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.868115 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.870024 4941 scope.go:117] "RemoveContainer" containerID="5fc2b12e31d0390dc9ebe2903453a5e135f7df5ef9e22a1ed9f41ece7cb1ac46"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.885554 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-config-data\") pod \"nova-api-0\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " pod="openstack/nova-api-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.885621 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48be0b99-919f-463f-b80d-8d737b43a70b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " pod="openstack/nova-metadata-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.885653 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " pod="openstack/nova-api-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.885676 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-logs\") pod \"nova-api-0\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " pod="openstack/nova-api-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.885782 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6bvl\" (UniqueName: \"kubernetes.io/projected/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-kube-api-access-j6bvl\") pod \"nova-scheduler-0\" (UID: \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\") " pod="openstack/nova-scheduler-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.885825 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\") " pod="openstack/nova-scheduler-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.885890 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7cpk\" (UniqueName: \"kubernetes.io/projected/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-kube-api-access-t7cpk\") pod \"nova-api-0\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " pod="openstack/nova-api-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.885941 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48be0b99-919f-463f-b80d-8d737b43a70b-logs\") pod \"nova-metadata-0\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " pod="openstack/nova-metadata-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.885965 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-config-data\") pod \"nova-scheduler-0\" (UID: \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\") " pod="openstack/nova-scheduler-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.886116 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2rgg\" (UniqueName: \"kubernetes.io/projected/48be0b99-919f-463f-b80d-8d737b43a70b-kube-api-access-g2rgg\") pod \"nova-metadata-0\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " pod="openstack/nova-metadata-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.886222 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48be0b99-919f-463f-b80d-8d737b43a70b-config-data\") pod \"nova-metadata-0\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " pod="openstack/nova-metadata-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.988340 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48be0b99-919f-463f-b80d-8d737b43a70b-logs\") pod \"nova-metadata-0\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " pod="openstack/nova-metadata-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.988677 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-config-data\") pod \"nova-scheduler-0\" (UID: \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\") " pod="openstack/nova-scheduler-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.988797 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2rgg\" (UniqueName: \"kubernetes.io/projected/48be0b99-919f-463f-b80d-8d737b43a70b-kube-api-access-g2rgg\") pod \"nova-metadata-0\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " pod="openstack/nova-metadata-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.988933 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48be0b99-919f-463f-b80d-8d737b43a70b-config-data\") pod \"nova-metadata-0\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " pod="openstack/nova-metadata-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.988847 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48be0b99-919f-463f-b80d-8d737b43a70b-logs\") pod \"nova-metadata-0\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " pod="openstack/nova-metadata-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.989092 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-config-data\") pod \"nova-api-0\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " pod="openstack/nova-api-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.989184 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48be0b99-919f-463f-b80d-8d737b43a70b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " pod="openstack/nova-metadata-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.989253 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " pod="openstack/nova-api-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.989350 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-logs\") pod \"nova-api-0\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " pod="openstack/nova-api-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.989438 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6bvl\" (UniqueName: \"kubernetes.io/projected/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-kube-api-access-j6bvl\") pod \"nova-scheduler-0\" (UID: \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\") " pod="openstack/nova-scheduler-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.989538 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\") " pod="openstack/nova-scheduler-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.989627 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7cpk\" (UniqueName: \"kubernetes.io/projected/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-kube-api-access-t7cpk\") pod \"nova-api-0\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " pod="openstack/nova-api-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.990617 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-logs\") pod \"nova-api-0\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " pod="openstack/nova-api-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.995442 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-config-data\") pod \"nova-scheduler-0\" (UID: \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\") " pod="openstack/nova-scheduler-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.996887 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " pod="openstack/nova-api-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.998814 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\") " pod="openstack/nova-scheduler-0"
Nov 30 08:12:13 crc kubenswrapper[4941]: I1130 08:12:13.999573 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48be0b99-919f-463f-b80d-8d737b43a70b-config-data\") pod \"nova-metadata-0\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " pod="openstack/nova-metadata-0"
Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.004457 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-config-data\") pod \"nova-api-0\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " pod="openstack/nova-api-0"
Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.006304 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48be0b99-919f-463f-b80d-8d737b43a70b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " pod="openstack/nova-metadata-0"
Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.007336 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7cpk\" (UniqueName: \"kubernetes.io/projected/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-kube-api-access-t7cpk\") pod \"nova-api-0\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " pod="openstack/nova-api-0"
Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.007533 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2rgg\" (UniqueName: \"kubernetes.io/projected/48be0b99-919f-463f-b80d-8d737b43a70b-kube-api-access-g2rgg\") pod \"nova-metadata-0\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " pod="openstack/nova-metadata-0"
Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.011679 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6bvl\" (UniqueName: \"kubernetes.io/projected/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-kube-api-access-j6bvl\") pod \"nova-scheduler-0\" (UID: \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\") " pod="openstack/nova-scheduler-0"
Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.121954 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.139791 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.181902 4941 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-api-0" Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.670556 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.679140 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.741877 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea","Type":"ContainerStarted","Data":"c578cbc2ae17963dee63b54613389c1165bd3973777b6daf4208985d94daa307"} Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.753525 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"48be0b99-919f-463f-b80d-8d737b43a70b","Type":"ContainerStarted","Data":"d7aa162b479b9dabff054a473a95943e9043ca9320658de22e4b31a33cc38372"} Nov 30 08:12:14 crc kubenswrapper[4941]: W1130 08:12:14.803795 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c6ac8f7_56ec_4d98_985e_c8571887dcb9.slice/crio-3b98280ce9ccff212947bb91587bd21de0b9258a53152aa24900f61b0cab2c33 WatchSource:0}: Error finding container 3b98280ce9ccff212947bb91587bd21de0b9258a53152aa24900f61b0cab2c33: Status 404 returned error can't find the container with id 3b98280ce9ccff212947bb91587bd21de0b9258a53152aa24900f61b0cab2c33 Nov 30 08:12:14 crc kubenswrapper[4941]: I1130 08:12:14.806697 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 30 08:12:15 crc kubenswrapper[4941]: I1130 08:12:15.534656 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65cf43e9-3dd6-4682-a582-d9a70c9f24fa" path="/var/lib/kubelet/pods/65cf43e9-3dd6-4682-a582-d9a70c9f24fa/volumes" Nov 30 08:12:15 crc kubenswrapper[4941]: I1130 08:12:15.536523 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5" path="/var/lib/kubelet/pods/b72c8b68-9e59-4da1-9ba3-a7f7bd30c3a5/volumes" Nov 30 08:12:15 crc kubenswrapper[4941]: I1130 08:12:15.537412 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6ed5710-d0ee-4581-a612-728298aa0c00" path="/var/lib/kubelet/pods/c6ed5710-d0ee-4581-a612-728298aa0c00/volumes" Nov 30 08:12:15 crc kubenswrapper[4941]: I1130 08:12:15.775484 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c6ac8f7-56ec-4d98-985e-c8571887dcb9","Type":"ContainerStarted","Data":"db89befe1a9d31d87ddb66aac310b14ab7e2b2257300dc83ef801b616616ca54"} Nov 30 08:12:15 crc kubenswrapper[4941]: I1130 08:12:15.775569 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c6ac8f7-56ec-4d98-985e-c8571887dcb9","Type":"ContainerStarted","Data":"926e052d09277df688e2ccb6fc589fb80443d0b11fcbead3f28c910a019567ac"} Nov 30 08:12:15 crc kubenswrapper[4941]: I1130 08:12:15.775600 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c6ac8f7-56ec-4d98-985e-c8571887dcb9","Type":"ContainerStarted","Data":"3b98280ce9ccff212947bb91587bd21de0b9258a53152aa24900f61b0cab2c33"} Nov 30 08:12:15 crc kubenswrapper[4941]: I1130 08:12:15.777448 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" 
event={"ID":"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea","Type":"ContainerStarted","Data":"8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5"} Nov 30 08:12:15 crc kubenswrapper[4941]: I1130 08:12:15.780168 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"48be0b99-919f-463f-b80d-8d737b43a70b","Type":"ContainerStarted","Data":"23ed605cc707fb420bacbb8275741163005a6058a7a0bb3cdecf35773dc9d078"} Nov 30 08:12:15 crc kubenswrapper[4941]: I1130 08:12:15.780233 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"48be0b99-919f-463f-b80d-8d737b43a70b","Type":"ContainerStarted","Data":"d50d2061a1e8afb52fd48ecb439a4f86d9f04f93eca330b76817ea6e475d0ec4"} Nov 30 08:12:15 crc kubenswrapper[4941]: I1130 08:12:15.799051 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.799028431 podStartE2EDuration="2.799028431s" podCreationTimestamp="2025-11-30 08:12:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:12:15.797340459 +0000 UTC m=+5156.565512068" watchObservedRunningTime="2025-11-30 08:12:15.799028431 +0000 UTC m=+5156.567200040" Nov 30 08:12:15 crc kubenswrapper[4941]: I1130 08:12:15.830220 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.830194996 podStartE2EDuration="2.830194996s" podCreationTimestamp="2025-11-30 08:12:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:12:15.819635079 +0000 UTC m=+5156.587806688" watchObservedRunningTime="2025-11-30 08:12:15.830194996 +0000 UTC m=+5156.598366605" Nov 30 08:12:15 crc kubenswrapper[4941]: I1130 08:12:15.842126 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.842106265 podStartE2EDuration="2.842106265s" podCreationTimestamp="2025-11-30 08:12:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:12:15.837869864 +0000 UTC m=+5156.606041473" watchObservedRunningTime="2025-11-30 08:12:15.842106265 +0000 UTC m=+5156.610277874" Nov 30 08:12:19 crc kubenswrapper[4941]: I1130 08:12:19.122294 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 30 08:12:19 crc kubenswrapper[4941]: I1130 08:12:19.140815 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 30 08:12:19 crc kubenswrapper[4941]: I1130 08:12:19.140937 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 30 08:12:24 crc kubenswrapper[4941]: I1130 08:12:24.123090 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 30 08:12:24 crc kubenswrapper[4941]: I1130 08:12:24.140599 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 30 08:12:24 crc kubenswrapper[4941]: I1130 08:12:24.140690 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 30 08:12:24 crc kubenswrapper[4941]: I1130 08:12:24.171958 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/nova-scheduler-0" Nov 30 08:12:24 crc kubenswrapper[4941]: I1130 08:12:24.183580 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 30 08:12:24 crc kubenswrapper[4941]: I1130 08:12:24.183649 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 30 08:12:24 crc kubenswrapper[4941]: I1130 08:12:24.930327 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 30 08:12:25 crc kubenswrapper[4941]: I1130 08:12:25.224527 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="48be0b99-919f-463f-b80d-8d737b43a70b" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.66:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 08:12:25 crc kubenswrapper[4941]: I1130 08:12:25.224573 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="48be0b99-919f-463f-b80d-8d737b43a70b" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.66:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 08:12:25 crc kubenswrapper[4941]: I1130 08:12:25.306636 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.67:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 08:12:25 crc kubenswrapper[4941]: I1130 08:12:25.306677 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.67:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 08:12:34 crc kubenswrapper[4941]: I1130 08:12:34.148221 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 30 08:12:34 crc kubenswrapper[4941]: I1130 08:12:34.150733 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 30 08:12:34 crc kubenswrapper[4941]: I1130 08:12:34.155521 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 30 08:12:34 crc kubenswrapper[4941]: I1130 08:12:34.189436 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 30 08:12:34 crc kubenswrapper[4941]: I1130 08:12:34.191203 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 30 08:12:34 crc kubenswrapper[4941]: I1130 08:12:34.191787 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 30 08:12:34 crc kubenswrapper[4941]: I1130 08:12:34.196646 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.028750 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.032406 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 
08:12:35.037660 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.361384 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cc47cc7dc-hqb9q"] Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.363466 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.383314 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cc47cc7dc-hqb9q"] Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.385421 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-config\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.385561 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-dns-svc\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.385709 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-ovsdbserver-nb\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.385819 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpzk2\" (UniqueName: \"kubernetes.io/projected/195d353f-b5fb-48a4-a458-625ccede1eee-kube-api-access-vpzk2\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.385954 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-ovsdbserver-sb\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.488282 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-ovsdbserver-sb\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.488499 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-config\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.488530 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-dns-svc\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.488604 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-ovsdbserver-nb\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.488646 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpzk2\" (UniqueName: \"kubernetes.io/projected/195d353f-b5fb-48a4-a458-625ccede1eee-kube-api-access-vpzk2\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.489372 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-ovsdbserver-sb\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.489979 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-ovsdbserver-nb\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.490293 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-config\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.490488 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-dns-svc\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.519591 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpzk2\" (UniqueName: \"kubernetes.io/projected/195d353f-b5fb-48a4-a458-625ccede1eee-kube-api-access-vpzk2\") pod \"dnsmasq-dns-7cc47cc7dc-hqb9q\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:35 crc kubenswrapper[4941]: I1130 08:12:35.707593 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:36 crc kubenswrapper[4941]: I1130 08:12:36.265815 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cc47cc7dc-hqb9q"] Nov 30 08:12:36 crc kubenswrapper[4941]: W1130 08:12:36.283245 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod195d353f_b5fb_48a4_a458_625ccede1eee.slice/crio-5c193e27e2dcfc8962e79ec2840ea4dc5afa1f4293b6ac889468e6e770648b78 WatchSource:0}: Error finding container 5c193e27e2dcfc8962e79ec2840ea4dc5afa1f4293b6ac889468e6e770648b78: Status 404 returned error can't find the container with id 5c193e27e2dcfc8962e79ec2840ea4dc5afa1f4293b6ac889468e6e770648b78 Nov 30 08:12:37 crc kubenswrapper[4941]: I1130 08:12:37.059728 4941 generic.go:334] "Generic (PLEG): container finished" podID="195d353f-b5fb-48a4-a458-625ccede1eee" containerID="4544d781bbc6dff34cde2d82fc3e5f307c9da333eccedd3f3f3cee2b21a652af" exitCode=0 Nov 30 08:12:37 crc kubenswrapper[4941]: I1130 08:12:37.059805 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" event={"ID":"195d353f-b5fb-48a4-a458-625ccede1eee","Type":"ContainerDied","Data":"4544d781bbc6dff34cde2d82fc3e5f307c9da333eccedd3f3f3cee2b21a652af"} Nov 30 08:12:37 crc kubenswrapper[4941]: I1130 08:12:37.060380 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" event={"ID":"195d353f-b5fb-48a4-a458-625ccede1eee","Type":"ContainerStarted","Data":"5c193e27e2dcfc8962e79ec2840ea4dc5afa1f4293b6ac889468e6e770648b78"} Nov 30 08:12:38 crc kubenswrapper[4941]: I1130 08:12:38.077940 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" event={"ID":"195d353f-b5fb-48a4-a458-625ccede1eee","Type":"ContainerStarted","Data":"db9094ecc70fe8683173f019336a6ad27bda82b378a9046547199c2aa8ffabcf"} Nov 30 08:12:38 crc kubenswrapper[4941]: I1130 08:12:38.078630 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:38 crc kubenswrapper[4941]: I1130 08:12:38.112591 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" podStartSLOduration=3.112563238 podStartE2EDuration="3.112563238s" podCreationTimestamp="2025-11-30 08:12:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:12:38.106760398 +0000 UTC m=+5178.874932017" watchObservedRunningTime="2025-11-30 08:12:38.112563238 +0000 UTC m=+5178.880734867" Nov 30 08:12:45 crc kubenswrapper[4941]: I1130 08:12:45.709645 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:12:45 crc kubenswrapper[4941]: I1130 08:12:45.791597 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8c7b8fc9-zkzn8"] Nov 30 08:12:45 crc kubenswrapper[4941]: I1130 08:12:45.791965 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" podUID="700d548b-4429-4ba2-b203-ee546c2eb43f" containerName="dnsmasq-dns" containerID="cri-o://191a1d3ddff67eec4b1d0ceb61434ccc38a8668846429b18dc0112fdd677c49f" gracePeriod=10 Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.208512 4941 generic.go:334] "Generic (PLEG): container finished" 
podID="700d548b-4429-4ba2-b203-ee546c2eb43f" containerID="191a1d3ddff67eec4b1d0ceb61434ccc38a8668846429b18dc0112fdd677c49f" exitCode=0 Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.208941 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" event={"ID":"700d548b-4429-4ba2-b203-ee546c2eb43f","Type":"ContainerDied","Data":"191a1d3ddff67eec4b1d0ceb61434ccc38a8668846429b18dc0112fdd677c49f"} Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.351033 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.469526 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-config\") pod \"700d548b-4429-4ba2-b203-ee546c2eb43f\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.469593 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-ovsdbserver-sb\") pod \"700d548b-4429-4ba2-b203-ee546c2eb43f\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.469838 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-dns-svc\") pod \"700d548b-4429-4ba2-b203-ee546c2eb43f\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.469873 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qcv6\" (UniqueName: \"kubernetes.io/projected/700d548b-4429-4ba2-b203-ee546c2eb43f-kube-api-access-4qcv6\") pod \"700d548b-4429-4ba2-b203-ee546c2eb43f\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.469896 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-ovsdbserver-nb\") pod \"700d548b-4429-4ba2-b203-ee546c2eb43f\" (UID: \"700d548b-4429-4ba2-b203-ee546c2eb43f\") " Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.483636 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/700d548b-4429-4ba2-b203-ee546c2eb43f-kube-api-access-4qcv6" (OuterVolumeSpecName: "kube-api-access-4qcv6") pod "700d548b-4429-4ba2-b203-ee546c2eb43f" (UID: "700d548b-4429-4ba2-b203-ee546c2eb43f"). InnerVolumeSpecName "kube-api-access-4qcv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.537391 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "700d548b-4429-4ba2-b203-ee546c2eb43f" (UID: "700d548b-4429-4ba2-b203-ee546c2eb43f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.543913 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "700d548b-4429-4ba2-b203-ee546c2eb43f" (UID: "700d548b-4429-4ba2-b203-ee546c2eb43f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.549979 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "700d548b-4429-4ba2-b203-ee546c2eb43f" (UID: "700d548b-4429-4ba2-b203-ee546c2eb43f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.565077 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-config" (OuterVolumeSpecName: "config") pod "700d548b-4429-4ba2-b203-ee546c2eb43f" (UID: "700d548b-4429-4ba2-b203-ee546c2eb43f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.577110 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.577151 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.577163 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.577172 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qcv6\" (UniqueName: \"kubernetes.io/projected/700d548b-4429-4ba2-b203-ee546c2eb43f-kube-api-access-4qcv6\") on node \"crc\" DevicePath \"\"" Nov 30 08:12:46 crc kubenswrapper[4941]: I1130 08:12:46.577183 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/700d548b-4429-4ba2-b203-ee546c2eb43f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 08:12:47 crc kubenswrapper[4941]: I1130 08:12:47.249359 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" event={"ID":"700d548b-4429-4ba2-b203-ee546c2eb43f","Type":"ContainerDied","Data":"d18b9c2dd5550e0b283e041d321e955b57e5b0fb582156c33f934e5f659eb679"} Nov 30 08:12:47 crc kubenswrapper[4941]: I1130 08:12:47.249851 4941 scope.go:117] "RemoveContainer" containerID="191a1d3ddff67eec4b1d0ceb61434ccc38a8668846429b18dc0112fdd677c49f" Nov 30 08:12:47 crc kubenswrapper[4941]: I1130 08:12:47.249565 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8c7b8fc9-zkzn8" Nov 30 08:12:47 crc kubenswrapper[4941]: I1130 08:12:47.309010 4941 scope.go:117] "RemoveContainer" containerID="daee6fd6c5ea69aeb4b1459565c0d92b78312a133fb8923ecb5a4dec7724ed61" Nov 30 08:12:47 crc kubenswrapper[4941]: I1130 08:12:47.345084 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8c7b8fc9-zkzn8"] Nov 30 08:12:47 crc kubenswrapper[4941]: I1130 08:12:47.356984 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8c7b8fc9-zkzn8"] Nov 30 08:12:47 crc kubenswrapper[4941]: I1130 08:12:47.543271 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="700d548b-4429-4ba2-b203-ee546c2eb43f" path="/var/lib/kubelet/pods/700d548b-4429-4ba2-b203-ee546c2eb43f/volumes" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.670156 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-cqttg"] Nov 30 08:12:48 crc kubenswrapper[4941]: E1130 08:12:48.671123 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="700d548b-4429-4ba2-b203-ee546c2eb43f" containerName="dnsmasq-dns" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.671143 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="700d548b-4429-4ba2-b203-ee546c2eb43f" containerName="dnsmasq-dns" Nov 30 08:12:48 crc kubenswrapper[4941]: E1130 08:12:48.671173 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="700d548b-4429-4ba2-b203-ee546c2eb43f" containerName="init" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.671180 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="700d548b-4429-4ba2-b203-ee546c2eb43f" containerName="init" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.671358 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="700d548b-4429-4ba2-b203-ee546c2eb43f" containerName="dnsmasq-dns" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.672039 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-cqttg" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.698376 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-cqttg"] Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.727088 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6eb8b92f-01d2-4330-b30e-36310de649ea-operator-scripts\") pod \"cinder-db-create-cqttg\" (UID: \"6eb8b92f-01d2-4330-b30e-36310de649ea\") " pod="openstack/cinder-db-create-cqttg" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.727300 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lmtf\" (UniqueName: \"kubernetes.io/projected/6eb8b92f-01d2-4330-b30e-36310de649ea-kube-api-access-9lmtf\") pod \"cinder-db-create-cqttg\" (UID: \"6eb8b92f-01d2-4330-b30e-36310de649ea\") " pod="openstack/cinder-db-create-cqttg" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.776111 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-3264-account-create-update-m2jhl"] Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.777636 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-3264-account-create-update-m2jhl" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.782023 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.800794 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3264-account-create-update-m2jhl"] Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.831292 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92489e01-470b-476d-8f67-07e883792c74-operator-scripts\") pod \"cinder-3264-account-create-update-m2jhl\" (UID: \"92489e01-470b-476d-8f67-07e883792c74\") " pod="openstack/cinder-3264-account-create-update-m2jhl" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.831421 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6eb8b92f-01d2-4330-b30e-36310de649ea-operator-scripts\") pod \"cinder-db-create-cqttg\" (UID: \"6eb8b92f-01d2-4330-b30e-36310de649ea\") " pod="openstack/cinder-db-create-cqttg" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.831472 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8mrs\" (UniqueName: \"kubernetes.io/projected/92489e01-470b-476d-8f67-07e883792c74-kube-api-access-v8mrs\") pod \"cinder-3264-account-create-update-m2jhl\" (UID: \"92489e01-470b-476d-8f67-07e883792c74\") " pod="openstack/cinder-3264-account-create-update-m2jhl" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.831536 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lmtf\" (UniqueName: \"kubernetes.io/projected/6eb8b92f-01d2-4330-b30e-36310de649ea-kube-api-access-9lmtf\") pod \"cinder-db-create-cqttg\" (UID: \"6eb8b92f-01d2-4330-b30e-36310de649ea\") " pod="openstack/cinder-db-create-cqttg" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.833825 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6eb8b92f-01d2-4330-b30e-36310de649ea-operator-scripts\") pod \"cinder-db-create-cqttg\" (UID: \"6eb8b92f-01d2-4330-b30e-36310de649ea\") " pod="openstack/cinder-db-create-cqttg" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.906413 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lmtf\" (UniqueName: \"kubernetes.io/projected/6eb8b92f-01d2-4330-b30e-36310de649ea-kube-api-access-9lmtf\") pod \"cinder-db-create-cqttg\" (UID: \"6eb8b92f-01d2-4330-b30e-36310de649ea\") " pod="openstack/cinder-db-create-cqttg" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.945172 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92489e01-470b-476d-8f67-07e883792c74-operator-scripts\") pod \"cinder-3264-account-create-update-m2jhl\" (UID: \"92489e01-470b-476d-8f67-07e883792c74\") " pod="openstack/cinder-3264-account-create-update-m2jhl" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.945283 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8mrs\" (UniqueName: \"kubernetes.io/projected/92489e01-470b-476d-8f67-07e883792c74-kube-api-access-v8mrs\") pod 
\"cinder-3264-account-create-update-m2jhl\" (UID: \"92489e01-470b-476d-8f67-07e883792c74\") " pod="openstack/cinder-3264-account-create-update-m2jhl" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.946807 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92489e01-470b-476d-8f67-07e883792c74-operator-scripts\") pod \"cinder-3264-account-create-update-m2jhl\" (UID: \"92489e01-470b-476d-8f67-07e883792c74\") " pod="openstack/cinder-3264-account-create-update-m2jhl" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.989027 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8mrs\" (UniqueName: \"kubernetes.io/projected/92489e01-470b-476d-8f67-07e883792c74-kube-api-access-v8mrs\") pod \"cinder-3264-account-create-update-m2jhl\" (UID: \"92489e01-470b-476d-8f67-07e883792c74\") " pod="openstack/cinder-3264-account-create-update-m2jhl" Nov 30 08:12:48 crc kubenswrapper[4941]: I1130 08:12:48.997235 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-cqttg" Nov 30 08:12:49 crc kubenswrapper[4941]: I1130 08:12:49.100386 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3264-account-create-update-m2jhl" Nov 30 08:12:49 crc kubenswrapper[4941]: I1130 08:12:49.596573 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-cqttg"] Nov 30 08:12:49 crc kubenswrapper[4941]: I1130 08:12:49.691198 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3264-account-create-update-m2jhl"] Nov 30 08:12:49 crc kubenswrapper[4941]: W1130 08:12:49.703695 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92489e01_470b_476d_8f67_07e883792c74.slice/crio-e9f55ddf465deeacae5608e29b5f3d67106e283746889af99553867cb2123096 WatchSource:0}: Error finding container e9f55ddf465deeacae5608e29b5f3d67106e283746889af99553867cb2123096: Status 404 returned error can't find the container with id e9f55ddf465deeacae5608e29b5f3d67106e283746889af99553867cb2123096 Nov 30 08:12:50 crc kubenswrapper[4941]: I1130 08:12:50.289121 4941 generic.go:334] "Generic (PLEG): container finished" podID="92489e01-470b-476d-8f67-07e883792c74" containerID="1236390d21008415dabca0d67703784dc11afbda5295bf3ab6d81e0cba6b1e68" exitCode=0 Nov 30 08:12:50 crc kubenswrapper[4941]: I1130 08:12:50.289189 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3264-account-create-update-m2jhl" event={"ID":"92489e01-470b-476d-8f67-07e883792c74","Type":"ContainerDied","Data":"1236390d21008415dabca0d67703784dc11afbda5295bf3ab6d81e0cba6b1e68"} Nov 30 08:12:50 crc kubenswrapper[4941]: I1130 08:12:50.289815 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3264-account-create-update-m2jhl" event={"ID":"92489e01-470b-476d-8f67-07e883792c74","Type":"ContainerStarted","Data":"e9f55ddf465deeacae5608e29b5f3d67106e283746889af99553867cb2123096"} Nov 30 08:12:50 crc kubenswrapper[4941]: I1130 08:12:50.296030 4941 generic.go:334] "Generic (PLEG): container finished" podID="6eb8b92f-01d2-4330-b30e-36310de649ea" containerID="045f18d50926302208689fee6ebf15ec7fa81f34795c48723e981361d7598d48" exitCode=0 Nov 30 08:12:50 crc kubenswrapper[4941]: I1130 08:12:50.296105 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cqttg" 
event={"ID":"6eb8b92f-01d2-4330-b30e-36310de649ea","Type":"ContainerDied","Data":"045f18d50926302208689fee6ebf15ec7fa81f34795c48723e981361d7598d48"} Nov 30 08:12:50 crc kubenswrapper[4941]: I1130 08:12:50.296136 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cqttg" event={"ID":"6eb8b92f-01d2-4330-b30e-36310de649ea","Type":"ContainerStarted","Data":"06c3d512f37665b82727c921e28a6a59418e9b98d06e07f949a1569fe7718d0b"} Nov 30 08:12:51 crc kubenswrapper[4941]: I1130 08:12:51.872207 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3264-account-create-update-m2jhl" Nov 30 08:12:51 crc kubenswrapper[4941]: I1130 08:12:51.877587 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-cqttg" Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.021611 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92489e01-470b-476d-8f67-07e883792c74-operator-scripts\") pod \"92489e01-470b-476d-8f67-07e883792c74\" (UID: \"92489e01-470b-476d-8f67-07e883792c74\") " Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.021660 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6eb8b92f-01d2-4330-b30e-36310de649ea-operator-scripts\") pod \"6eb8b92f-01d2-4330-b30e-36310de649ea\" (UID: \"6eb8b92f-01d2-4330-b30e-36310de649ea\") " Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.021704 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8mrs\" (UniqueName: \"kubernetes.io/projected/92489e01-470b-476d-8f67-07e883792c74-kube-api-access-v8mrs\") pod \"92489e01-470b-476d-8f67-07e883792c74\" (UID: \"92489e01-470b-476d-8f67-07e883792c74\") " Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.021790 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lmtf\" (UniqueName: \"kubernetes.io/projected/6eb8b92f-01d2-4330-b30e-36310de649ea-kube-api-access-9lmtf\") pod \"6eb8b92f-01d2-4330-b30e-36310de649ea\" (UID: \"6eb8b92f-01d2-4330-b30e-36310de649ea\") " Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.022879 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92489e01-470b-476d-8f67-07e883792c74-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "92489e01-470b-476d-8f67-07e883792c74" (UID: "92489e01-470b-476d-8f67-07e883792c74"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.023453 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6eb8b92f-01d2-4330-b30e-36310de649ea-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6eb8b92f-01d2-4330-b30e-36310de649ea" (UID: "6eb8b92f-01d2-4330-b30e-36310de649ea"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.028574 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6eb8b92f-01d2-4330-b30e-36310de649ea-kube-api-access-9lmtf" (OuterVolumeSpecName: "kube-api-access-9lmtf") pod "6eb8b92f-01d2-4330-b30e-36310de649ea" (UID: "6eb8b92f-01d2-4330-b30e-36310de649ea"). 
InnerVolumeSpecName "kube-api-access-9lmtf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.029114 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92489e01-470b-476d-8f67-07e883792c74-kube-api-access-v8mrs" (OuterVolumeSpecName: "kube-api-access-v8mrs") pod "92489e01-470b-476d-8f67-07e883792c74" (UID: "92489e01-470b-476d-8f67-07e883792c74"). InnerVolumeSpecName "kube-api-access-v8mrs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.124480 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/92489e01-470b-476d-8f67-07e883792c74-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.124569 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6eb8b92f-01d2-4330-b30e-36310de649ea-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.124590 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8mrs\" (UniqueName: \"kubernetes.io/projected/92489e01-470b-476d-8f67-07e883792c74-kube-api-access-v8mrs\") on node \"crc\" DevicePath \"\"" Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.124647 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lmtf\" (UniqueName: \"kubernetes.io/projected/6eb8b92f-01d2-4330-b30e-36310de649ea-kube-api-access-9lmtf\") on node \"crc\" DevicePath \"\"" Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.318000 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3264-account-create-update-m2jhl" Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.318014 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3264-account-create-update-m2jhl" event={"ID":"92489e01-470b-476d-8f67-07e883792c74","Type":"ContainerDied","Data":"e9f55ddf465deeacae5608e29b5f3d67106e283746889af99553867cb2123096"} Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.318073 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9f55ddf465deeacae5608e29b5f3d67106e283746889af99553867cb2123096" Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.320515 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-cqttg" event={"ID":"6eb8b92f-01d2-4330-b30e-36310de649ea","Type":"ContainerDied","Data":"06c3d512f37665b82727c921e28a6a59418e9b98d06e07f949a1569fe7718d0b"} Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.320550 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06c3d512f37665b82727c921e28a6a59418e9b98d06e07f949a1569fe7718d0b" Nov 30 08:12:52 crc kubenswrapper[4941]: I1130 08:12:52.320665 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-cqttg" Nov 30 08:12:53 crc kubenswrapper[4941]: I1130 08:12:53.957147 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-jnlt5"] Nov 30 08:12:53 crc kubenswrapper[4941]: E1130 08:12:53.958065 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eb8b92f-01d2-4330-b30e-36310de649ea" containerName="mariadb-database-create" Nov 30 08:12:53 crc kubenswrapper[4941]: I1130 08:12:53.958082 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eb8b92f-01d2-4330-b30e-36310de649ea" containerName="mariadb-database-create" Nov 30 08:12:53 crc kubenswrapper[4941]: E1130 08:12:53.958106 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92489e01-470b-476d-8f67-07e883792c74" containerName="mariadb-account-create-update" Nov 30 08:12:53 crc kubenswrapper[4941]: I1130 08:12:53.958112 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="92489e01-470b-476d-8f67-07e883792c74" containerName="mariadb-account-create-update" Nov 30 08:12:53 crc kubenswrapper[4941]: I1130 08:12:53.958330 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="92489e01-470b-476d-8f67-07e883792c74" containerName="mariadb-account-create-update" Nov 30 08:12:53 crc kubenswrapper[4941]: I1130 08:12:53.958377 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eb8b92f-01d2-4330-b30e-36310de649ea" containerName="mariadb-database-create" Nov 30 08:12:53 crc kubenswrapper[4941]: I1130 08:12:53.959102 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:53 crc kubenswrapper[4941]: I1130 08:12:53.962452 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 30 08:12:53 crc kubenswrapper[4941]: I1130 08:12:53.962631 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 30 08:12:53 crc kubenswrapper[4941]: I1130 08:12:53.962545 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-kcf4j" Nov 30 08:12:53 crc kubenswrapper[4941]: I1130 08:12:53.968630 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-jnlt5"] Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.097769 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-scripts\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.098207 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-config-data\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.098587 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-etc-machine-id\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.098875 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-combined-ca-bundle\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.099027 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-db-sync-config-data\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.099174 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqdj4\" (UniqueName: \"kubernetes.io/projected/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-kube-api-access-bqdj4\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.200755 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-config-data\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.200855 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-etc-machine-id\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.200933 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-combined-ca-bundle\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.200966 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-db-sync-config-data\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.201006 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqdj4\" (UniqueName: \"kubernetes.io/projected/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-kube-api-access-bqdj4\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.201039 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-scripts\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.201715 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-etc-machine-id\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.208069 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-scripts\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.208973 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-config-data\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.209372 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-combined-ca-bundle\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.218058 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-db-sync-config-data\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.226879 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqdj4\" (UniqueName: \"kubernetes.io/projected/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-kube-api-access-bqdj4\") pod \"cinder-db-sync-jnlt5\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.295037 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:12:54 crc kubenswrapper[4941]: I1130 08:12:54.842723 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-jnlt5"] Nov 30 08:12:55 crc kubenswrapper[4941]: I1130 08:12:55.358570 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-jnlt5" event={"ID":"22cf5e10-17a3-4530-9caf-cfa2f2b697bf","Type":"ContainerStarted","Data":"2ac166c703bc0f750a5e5da1fd35fa8cd0d99a1b0854bf0a142fc9bda1aedaa7"} Nov 30 08:13:15 crc kubenswrapper[4941]: E1130 08:13:15.934732 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-cinder-api:fa2bb8efef6782c26ea7f1675eeb36dd" Nov 30 08:13:15 crc kubenswrapper[4941]: E1130 08:13:15.935587 4941 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-cinder-api:fa2bb8efef6782c26ea7f1675eeb36dd" Nov 30 08:13:15 crc kubenswrapper[4941]: E1130 08:13:15.935778 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-cinder-api:fa2bb8efef6782c26ea7f1675eeb36dd,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bqdj4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:
[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-jnlt5_openstack(22cf5e10-17a3-4530-9caf-cfa2f2b697bf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 30 08:13:15 crc kubenswrapper[4941]: E1130 08:13:15.937052 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-jnlt5" podUID="22cf5e10-17a3-4530-9caf-cfa2f2b697bf" Nov 30 08:13:16 crc kubenswrapper[4941]: E1130 08:13:16.633833 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-cinder-api:fa2bb8efef6782c26ea7f1675eeb36dd\\\"\"" pod="openstack/cinder-db-sync-jnlt5" podUID="22cf5e10-17a3-4530-9caf-cfa2f2b697bf" Nov 30 08:13:31 crc kubenswrapper[4941]: I1130 08:13:31.822230 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-jnlt5" event={"ID":"22cf5e10-17a3-4530-9caf-cfa2f2b697bf","Type":"ContainerStarted","Data":"563b508e086b662d6201afd4dbec37e76089d0dda8721b275abf1157af9545b2"} Nov 30 08:13:31 crc kubenswrapper[4941]: I1130 08:13:31.864242 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-jnlt5" podStartSLOduration=2.983677278 podStartE2EDuration="38.864205135s" podCreationTimestamp="2025-11-30 08:12:53 +0000 UTC" firstStartedPulling="2025-11-30 08:12:54.860603148 +0000 UTC m=+5195.628774757" lastFinishedPulling="2025-11-30 08:13:30.741130965 +0000 UTC m=+5231.509302614" observedRunningTime="2025-11-30 08:13:31.851199163 +0000 UTC m=+5232.619370782" watchObservedRunningTime="2025-11-30 08:13:31.864205135 +0000 UTC m=+5232.632376754" Nov 30 08:13:32 crc kubenswrapper[4941]: I1130 08:13:32.978370 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:13:32 crc kubenswrapper[4941]: I1130 08:13:32.978458 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:13:34 crc kubenswrapper[4941]: I1130 08:13:34.864941 4941 generic.go:334] "Generic (PLEG): container finished" podID="22cf5e10-17a3-4530-9caf-cfa2f2b697bf" containerID="563b508e086b662d6201afd4dbec37e76089d0dda8721b275abf1157af9545b2" exitCode=0 Nov 30 08:13:34 crc kubenswrapper[4941]: I1130 08:13:34.865042 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-jnlt5" event={"ID":"22cf5e10-17a3-4530-9caf-cfa2f2b697bf","Type":"ContainerDied","Data":"563b508e086b662d6201afd4dbec37e76089d0dda8721b275abf1157af9545b2"} Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.288002 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.413318 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-db-sync-config-data\") pod \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.413430 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-etc-machine-id\") pod \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.413553 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqdj4\" (UniqueName: \"kubernetes.io/projected/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-kube-api-access-bqdj4\") pod \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.413565 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "22cf5e10-17a3-4530-9caf-cfa2f2b697bf" (UID: "22cf5e10-17a3-4530-9caf-cfa2f2b697bf"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.413591 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-config-data\") pod \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.413720 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-combined-ca-bundle\") pod \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.413895 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-scripts\") pod \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\" (UID: \"22cf5e10-17a3-4530-9caf-cfa2f2b697bf\") " Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.414941 4941 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.419610 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "22cf5e10-17a3-4530-9caf-cfa2f2b697bf" (UID: "22cf5e10-17a3-4530-9caf-cfa2f2b697bf"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.419635 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-scripts" (OuterVolumeSpecName: "scripts") pod "22cf5e10-17a3-4530-9caf-cfa2f2b697bf" (UID: "22cf5e10-17a3-4530-9caf-cfa2f2b697bf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.419713 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-kube-api-access-bqdj4" (OuterVolumeSpecName: "kube-api-access-bqdj4") pod "22cf5e10-17a3-4530-9caf-cfa2f2b697bf" (UID: "22cf5e10-17a3-4530-9caf-cfa2f2b697bf"). InnerVolumeSpecName "kube-api-access-bqdj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.445574 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22cf5e10-17a3-4530-9caf-cfa2f2b697bf" (UID: "22cf5e10-17a3-4530-9caf-cfa2f2b697bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.470114 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-config-data" (OuterVolumeSpecName: "config-data") pod "22cf5e10-17a3-4530-9caf-cfa2f2b697bf" (UID: "22cf5e10-17a3-4530-9caf-cfa2f2b697bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.516517 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqdj4\" (UniqueName: \"kubernetes.io/projected/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-kube-api-access-bqdj4\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.516558 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.516568 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.516582 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.516591 4941 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/22cf5e10-17a3-4530-9caf-cfa2f2b697bf-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.896837 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-jnlt5" event={"ID":"22cf5e10-17a3-4530-9caf-cfa2f2b697bf","Type":"ContainerDied","Data":"2ac166c703bc0f750a5e5da1fd35fa8cd0d99a1b0854bf0a142fc9bda1aedaa7"} Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.896915 4941 pod_container_deletor.go:80] 
"Container not found in pod's containers" containerID="2ac166c703bc0f750a5e5da1fd35fa8cd0d99a1b0854bf0a142fc9bda1aedaa7" Nov 30 08:13:36 crc kubenswrapper[4941]: I1130 08:13:36.896968 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-jnlt5" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.303227 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7474c9cf97-chjhn"] Nov 30 08:13:37 crc kubenswrapper[4941]: E1130 08:13:37.303748 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22cf5e10-17a3-4530-9caf-cfa2f2b697bf" containerName="cinder-db-sync" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.303762 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="22cf5e10-17a3-4530-9caf-cfa2f2b697bf" containerName="cinder-db-sync" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.303961 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="22cf5e10-17a3-4530-9caf-cfa2f2b697bf" containerName="cinder-db-sync" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.305137 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.326088 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7474c9cf97-chjhn"] Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.441778 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-dns-svc\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.442081 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-ovsdbserver-sb\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.442158 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tljgt\" (UniqueName: \"kubernetes.io/projected/bea2342a-e7e2-4154-b873-c552f6c6fdbb-kube-api-access-tljgt\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.442223 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-config\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.442344 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-ovsdbserver-nb\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.544543 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-dns-svc\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.544633 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-ovsdbserver-sb\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.544661 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tljgt\" (UniqueName: \"kubernetes.io/projected/bea2342a-e7e2-4154-b873-c552f6c6fdbb-kube-api-access-tljgt\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.544691 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-config\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.544727 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-ovsdbserver-nb\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.545665 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-ovsdbserver-nb\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.546344 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-dns-svc\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.546886 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-ovsdbserver-sb\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.546956 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.547688 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-config\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.548595 4941 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.559995 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-kcf4j" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.560221 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.567400 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.568232 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.575904 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tljgt\" (UniqueName: \"kubernetes.io/projected/bea2342a-e7e2-4154-b873-c552f6c6fdbb-kube-api-access-tljgt\") pod \"dnsmasq-dns-7474c9cf97-chjhn\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.581626 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.637842 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.647246 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed051ad7-48ac-415d-b3b2-e58662aa030c-logs\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.647303 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdjzd\" (UniqueName: \"kubernetes.io/projected/ed051ad7-48ac-415d-b3b2-e58662aa030c-kube-api-access-wdjzd\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.647865 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-scripts\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.647935 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-config-data\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.648093 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed051ad7-48ac-415d-b3b2-e58662aa030c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.648122 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-config-data-custom\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.648257 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.750474 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed051ad7-48ac-415d-b3b2-e58662aa030c-logs\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.750541 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdjzd\" (UniqueName: \"kubernetes.io/projected/ed051ad7-48ac-415d-b3b2-e58662aa030c-kube-api-access-wdjzd\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.750625 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-scripts\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.750653 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-config-data\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.750703 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed051ad7-48ac-415d-b3b2-e58662aa030c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.750725 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-config-data-custom\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.750773 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.751164 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed051ad7-48ac-415d-b3b2-e58662aa030c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.751255 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ed051ad7-48ac-415d-b3b2-e58662aa030c-logs\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.754871 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-scripts\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.756307 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.757095 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-config-data-custom\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.771699 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-config-data\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.775562 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdjzd\" (UniqueName: \"kubernetes.io/projected/ed051ad7-48ac-415d-b3b2-e58662aa030c-kube-api-access-wdjzd\") pod \"cinder-api-0\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " pod="openstack/cinder-api-0" Nov 30 08:13:37 crc kubenswrapper[4941]: I1130 08:13:37.867790 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 30 08:13:38 crc kubenswrapper[4941]: I1130 08:13:38.188657 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7474c9cf97-chjhn"] Nov 30 08:13:38 crc kubenswrapper[4941]: W1130 08:13:38.474211 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded051ad7_48ac_415d_b3b2_e58662aa030c.slice/crio-8e38e8f8a425ac404471f74d7ba05a1c7606887d2bf4ec97459ea71c57a1307e WatchSource:0}: Error finding container 8e38e8f8a425ac404471f74d7ba05a1c7606887d2bf4ec97459ea71c57a1307e: Status 404 returned error can't find the container with id 8e38e8f8a425ac404471f74d7ba05a1c7606887d2bf4ec97459ea71c57a1307e Nov 30 08:13:38 crc kubenswrapper[4941]: I1130 08:13:38.474314 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 30 08:13:38 crc kubenswrapper[4941]: I1130 08:13:38.919773 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ed051ad7-48ac-415d-b3b2-e58662aa030c","Type":"ContainerStarted","Data":"8e38e8f8a425ac404471f74d7ba05a1c7606887d2bf4ec97459ea71c57a1307e"} Nov 30 08:13:38 crc kubenswrapper[4941]: I1130 08:13:38.921837 4941 generic.go:334] "Generic (PLEG): container finished" podID="bea2342a-e7e2-4154-b873-c552f6c6fdbb" containerID="4948b77c0b1396acac9dc961259bbc4bc7ba5da94a26d586f3c9266a9aa72496" exitCode=0 Nov 30 08:13:38 crc kubenswrapper[4941]: I1130 08:13:38.921875 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" event={"ID":"bea2342a-e7e2-4154-b873-c552f6c6fdbb","Type":"ContainerDied","Data":"4948b77c0b1396acac9dc961259bbc4bc7ba5da94a26d586f3c9266a9aa72496"} Nov 30 08:13:38 crc kubenswrapper[4941]: I1130 08:13:38.921897 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" event={"ID":"bea2342a-e7e2-4154-b873-c552f6c6fdbb","Type":"ContainerStarted","Data":"8e406a7ada6b9d030fa5b6c6e1d605f511e2c6f58b775410f06295bdb321081f"} Nov 30 08:13:39 crc kubenswrapper[4941]: I1130 08:13:39.962228 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ed051ad7-48ac-415d-b3b2-e58662aa030c","Type":"ContainerStarted","Data":"f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a"} Nov 30 08:13:39 crc kubenswrapper[4941]: I1130 08:13:39.974130 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" event={"ID":"bea2342a-e7e2-4154-b873-c552f6c6fdbb","Type":"ContainerStarted","Data":"9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f"} Nov 30 08:13:39 crc kubenswrapper[4941]: I1130 08:13:39.976322 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:40 crc kubenswrapper[4941]: I1130 08:13:40.004789 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" podStartSLOduration=3.00476807 podStartE2EDuration="3.00476807s" podCreationTimestamp="2025-11-30 08:13:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:13:40.000459187 +0000 UTC m=+5240.768630806" watchObservedRunningTime="2025-11-30 08:13:40.00476807 +0000 UTC m=+5240.772939679" Nov 30 08:13:40 crc kubenswrapper[4941]: I1130 08:13:40.987684 4941 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/cinder-api-0" event={"ID":"ed051ad7-48ac-415d-b3b2-e58662aa030c","Type":"ContainerStarted","Data":"41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3"} Nov 30 08:13:40 crc kubenswrapper[4941]: I1130 08:13:40.988273 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 30 08:13:41 crc kubenswrapper[4941]: I1130 08:13:41.016564 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.016534255 podStartE2EDuration="4.016534255s" podCreationTimestamp="2025-11-30 08:13:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:13:41.008847747 +0000 UTC m=+5241.777019376" watchObservedRunningTime="2025-11-30 08:13:41.016534255 +0000 UTC m=+5241.784705864" Nov 30 08:13:47 crc kubenswrapper[4941]: I1130 08:13:47.639595 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:13:47 crc kubenswrapper[4941]: I1130 08:13:47.728637 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cc47cc7dc-hqb9q"] Nov 30 08:13:47 crc kubenswrapper[4941]: I1130 08:13:47.728902 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" podUID="195d353f-b5fb-48a4-a458-625ccede1eee" containerName="dnsmasq-dns" containerID="cri-o://db9094ecc70fe8683173f019336a6ad27bda82b378a9046547199c2aa8ffabcf" gracePeriod=10 Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.090829 4941 generic.go:334] "Generic (PLEG): container finished" podID="195d353f-b5fb-48a4-a458-625ccede1eee" containerID="db9094ecc70fe8683173f019336a6ad27bda82b378a9046547199c2aa8ffabcf" exitCode=0 Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.090871 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" event={"ID":"195d353f-b5fb-48a4-a458-625ccede1eee","Type":"ContainerDied","Data":"db9094ecc70fe8683173f019336a6ad27bda82b378a9046547199c2aa8ffabcf"} Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.302963 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.437590 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-ovsdbserver-sb\") pod \"195d353f-b5fb-48a4-a458-625ccede1eee\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.437654 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpzk2\" (UniqueName: \"kubernetes.io/projected/195d353f-b5fb-48a4-a458-625ccede1eee-kube-api-access-vpzk2\") pod \"195d353f-b5fb-48a4-a458-625ccede1eee\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.437766 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-config\") pod \"195d353f-b5fb-48a4-a458-625ccede1eee\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.437892 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-dns-svc\") pod \"195d353f-b5fb-48a4-a458-625ccede1eee\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.437923 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-ovsdbserver-nb\") pod \"195d353f-b5fb-48a4-a458-625ccede1eee\" (UID: \"195d353f-b5fb-48a4-a458-625ccede1eee\") " Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.456771 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/195d353f-b5fb-48a4-a458-625ccede1eee-kube-api-access-vpzk2" (OuterVolumeSpecName: "kube-api-access-vpzk2") pod "195d353f-b5fb-48a4-a458-625ccede1eee" (UID: "195d353f-b5fb-48a4-a458-625ccede1eee"). InnerVolumeSpecName "kube-api-access-vpzk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.487671 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "195d353f-b5fb-48a4-a458-625ccede1eee" (UID: "195d353f-b5fb-48a4-a458-625ccede1eee"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.491525 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "195d353f-b5fb-48a4-a458-625ccede1eee" (UID: "195d353f-b5fb-48a4-a458-625ccede1eee"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.513746 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "195d353f-b5fb-48a4-a458-625ccede1eee" (UID: "195d353f-b5fb-48a4-a458-625ccede1eee"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.540876 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpzk2\" (UniqueName: \"kubernetes.io/projected/195d353f-b5fb-48a4-a458-625ccede1eee-kube-api-access-vpzk2\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.540919 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.540932 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.540945 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.540967 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-config" (OuterVolumeSpecName: "config") pod "195d353f-b5fb-48a4-a458-625ccede1eee" (UID: "195d353f-b5fb-48a4-a458-625ccede1eee"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:13:48 crc kubenswrapper[4941]: I1130 08:13:48.642790 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/195d353f-b5fb-48a4-a458-625ccede1eee-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.101401 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" event={"ID":"195d353f-b5fb-48a4-a458-625ccede1eee","Type":"ContainerDied","Data":"5c193e27e2dcfc8962e79ec2840ea4dc5afa1f4293b6ac889468e6e770648b78"} Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.101491 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cc47cc7dc-hqb9q" Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.101675 4941 scope.go:117] "RemoveContainer" containerID="db9094ecc70fe8683173f019336a6ad27bda82b378a9046547199c2aa8ffabcf" Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.126610 4941 scope.go:117] "RemoveContainer" containerID="4544d781bbc6dff34cde2d82fc3e5f307c9da333eccedd3f3f3cee2b21a652af" Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.180976 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cc47cc7dc-hqb9q"] Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.207033 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cc47cc7dc-hqb9q"] Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.284722 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.285039 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="a9a5e3c2-ae0c-4884-a48b-705bfda74cda" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://d91b3f0bed97a8e8c12e9e1e3c39e9df136efaab2861f66263721f487a5145a3" gracePeriod=30 Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.299228 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.299589 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="b22ca75a-372d-42f8-89ad-9a8c88546f58" containerName="nova-cell0-conductor-conductor" containerID="cri-o://be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667" gracePeriod=30 Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.313945 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.314286 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea" containerName="nova-scheduler-scheduler" containerID="cri-o://8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5" gracePeriod=30 Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.324523 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.326186 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" containerName="nova-api-log" containerID="cri-o://926e052d09277df688e2ccb6fc589fb80443d0b11fcbead3f28c910a019567ac" gracePeriod=30 Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.326713 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" containerName="nova-api-api" containerID="cri-o://db89befe1a9d31d87ddb66aac310b14ab7e2b2257300dc83ef801b616616ca54" gracePeriod=30 Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.338940 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.339223 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="48be0b99-919f-463f-b80d-8d737b43a70b" 
containerName="nova-metadata-log" containerID="cri-o://d50d2061a1e8afb52fd48ecb439a4f86d9f04f93eca330b76817ea6e475d0ec4" gracePeriod=30 Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.339698 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="48be0b99-919f-463f-b80d-8d737b43a70b" containerName="nova-metadata-metadata" containerID="cri-o://23ed605cc707fb420bacbb8275741163005a6058a7a0bb3cdecf35773dc9d078" gracePeriod=30 Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.534437 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="195d353f-b5fb-48a4-a458-625ccede1eee" path="/var/lib/kubelet/pods/195d353f-b5fb-48a4-a458-625ccede1eee/volumes" Nov 30 08:13:49 crc kubenswrapper[4941]: I1130 08:13:49.825020 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 30 08:13:50 crc kubenswrapper[4941]: I1130 08:13:50.114344 4941 generic.go:334] "Generic (PLEG): container finished" podID="a9a5e3c2-ae0c-4884-a48b-705bfda74cda" containerID="d91b3f0bed97a8e8c12e9e1e3c39e9df136efaab2861f66263721f487a5145a3" exitCode=0 Nov 30 08:13:50 crc kubenswrapper[4941]: I1130 08:13:50.114389 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a9a5e3c2-ae0c-4884-a48b-705bfda74cda","Type":"ContainerDied","Data":"d91b3f0bed97a8e8c12e9e1e3c39e9df136efaab2861f66263721f487a5145a3"} Nov 30 08:13:50 crc kubenswrapper[4941]: I1130 08:13:50.116419 4941 generic.go:334] "Generic (PLEG): container finished" podID="48be0b99-919f-463f-b80d-8d737b43a70b" containerID="d50d2061a1e8afb52fd48ecb439a4f86d9f04f93eca330b76817ea6e475d0ec4" exitCode=143 Nov 30 08:13:50 crc kubenswrapper[4941]: I1130 08:13:50.116497 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"48be0b99-919f-463f-b80d-8d737b43a70b","Type":"ContainerDied","Data":"d50d2061a1e8afb52fd48ecb439a4f86d9f04f93eca330b76817ea6e475d0ec4"} Nov 30 08:13:50 crc kubenswrapper[4941]: I1130 08:13:50.118401 4941 generic.go:334] "Generic (PLEG): container finished" podID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" containerID="926e052d09277df688e2ccb6fc589fb80443d0b11fcbead3f28c910a019567ac" exitCode=143 Nov 30 08:13:50 crc kubenswrapper[4941]: I1130 08:13:50.118429 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c6ac8f7-56ec-4d98-985e-c8571887dcb9","Type":"ContainerDied","Data":"926e052d09277df688e2ccb6fc589fb80443d0b11fcbead3f28c910a019567ac"} Nov 30 08:13:50 crc kubenswrapper[4941]: I1130 08:13:50.861012 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:50 crc kubenswrapper[4941]: I1130 08:13:50.866425 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.013530 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckvh9\" (UniqueName: \"kubernetes.io/projected/b22ca75a-372d-42f8-89ad-9a8c88546f58-kube-api-access-ckvh9\") pod \"b22ca75a-372d-42f8-89ad-9a8c88546f58\" (UID: \"b22ca75a-372d-42f8-89ad-9a8c88546f58\") " Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.013603 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-config-data\") pod \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\" (UID: \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\") " Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.013661 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-combined-ca-bundle\") pod \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\" (UID: \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\") " Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.013696 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b22ca75a-372d-42f8-89ad-9a8c88546f58-config-data\") pod \"b22ca75a-372d-42f8-89ad-9a8c88546f58\" (UID: \"b22ca75a-372d-42f8-89ad-9a8c88546f58\") " Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.013826 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l65lq\" (UniqueName: \"kubernetes.io/projected/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-kube-api-access-l65lq\") pod \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\" (UID: \"a9a5e3c2-ae0c-4884-a48b-705bfda74cda\") " Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.013993 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b22ca75a-372d-42f8-89ad-9a8c88546f58-combined-ca-bundle\") pod \"b22ca75a-372d-42f8-89ad-9a8c88546f58\" (UID: \"b22ca75a-372d-42f8-89ad-9a8c88546f58\") " Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.023628 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-kube-api-access-l65lq" (OuterVolumeSpecName: "kube-api-access-l65lq") pod "a9a5e3c2-ae0c-4884-a48b-705bfda74cda" (UID: "a9a5e3c2-ae0c-4884-a48b-705bfda74cda"). InnerVolumeSpecName "kube-api-access-l65lq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.037258 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b22ca75a-372d-42f8-89ad-9a8c88546f58-kube-api-access-ckvh9" (OuterVolumeSpecName: "kube-api-access-ckvh9") pod "b22ca75a-372d-42f8-89ad-9a8c88546f58" (UID: "b22ca75a-372d-42f8-89ad-9a8c88546f58"). InnerVolumeSpecName "kube-api-access-ckvh9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.043532 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-config-data" (OuterVolumeSpecName: "config-data") pod "a9a5e3c2-ae0c-4884-a48b-705bfda74cda" (UID: "a9a5e3c2-ae0c-4884-a48b-705bfda74cda"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.044299 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b22ca75a-372d-42f8-89ad-9a8c88546f58-config-data" (OuterVolumeSpecName: "config-data") pod "b22ca75a-372d-42f8-89ad-9a8c88546f58" (UID: "b22ca75a-372d-42f8-89ad-9a8c88546f58"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.047255 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9a5e3c2-ae0c-4884-a48b-705bfda74cda" (UID: "a9a5e3c2-ae0c-4884-a48b-705bfda74cda"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.053121 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b22ca75a-372d-42f8-89ad-9a8c88546f58-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b22ca75a-372d-42f8-89ad-9a8c88546f58" (UID: "b22ca75a-372d-42f8-89ad-9a8c88546f58"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.116894 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckvh9\" (UniqueName: \"kubernetes.io/projected/b22ca75a-372d-42f8-89ad-9a8c88546f58-kube-api-access-ckvh9\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.116945 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.116974 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.116984 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b22ca75a-372d-42f8-89ad-9a8c88546f58-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.116998 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l65lq\" (UniqueName: \"kubernetes.io/projected/a9a5e3c2-ae0c-4884-a48b-705bfda74cda-kube-api-access-l65lq\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.117009 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b22ca75a-372d-42f8-89ad-9a8c88546f58-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.128512 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a9a5e3c2-ae0c-4884-a48b-705bfda74cda","Type":"ContainerDied","Data":"af0ea63ed474bf377a39115bb8e95a0b292e3ea5bc7a3e6bdb4cb02c25be1f49"} Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.128574 4941 scope.go:117] "RemoveContainer" containerID="d91b3f0bed97a8e8c12e9e1e3c39e9df136efaab2861f66263721f487a5145a3" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.128680 
4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.136726 4941 generic.go:334] "Generic (PLEG): container finished" podID="b22ca75a-372d-42f8-89ad-9a8c88546f58" containerID="be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667" exitCode=0 Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.136780 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b22ca75a-372d-42f8-89ad-9a8c88546f58","Type":"ContainerDied","Data":"be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667"} Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.136813 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b22ca75a-372d-42f8-89ad-9a8c88546f58","Type":"ContainerDied","Data":"81fdac5de95498187ff4135e183a7f8585c4a23f78efc518ab0ba5d6c7debcf4"} Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.136873 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.185810 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.202706 4941 scope.go:117] "RemoveContainer" containerID="be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.211587 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.224952 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.242272 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.255609 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 08:13:51 crc kubenswrapper[4941]: E1130 08:13:51.256087 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="195d353f-b5fb-48a4-a458-625ccede1eee" containerName="init" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.256107 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="195d353f-b5fb-48a4-a458-625ccede1eee" containerName="init" Nov 30 08:13:51 crc kubenswrapper[4941]: E1130 08:13:51.256136 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22ca75a-372d-42f8-89ad-9a8c88546f58" containerName="nova-cell0-conductor-conductor" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.256146 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22ca75a-372d-42f8-89ad-9a8c88546f58" containerName="nova-cell0-conductor-conductor" Nov 30 08:13:51 crc kubenswrapper[4941]: E1130 08:13:51.256162 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9a5e3c2-ae0c-4884-a48b-705bfda74cda" containerName="nova-cell1-novncproxy-novncproxy" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.256169 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9a5e3c2-ae0c-4884-a48b-705bfda74cda" containerName="nova-cell1-novncproxy-novncproxy" Nov 30 08:13:51 crc kubenswrapper[4941]: E1130 08:13:51.256182 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="195d353f-b5fb-48a4-a458-625ccede1eee" 
containerName="dnsmasq-dns" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.256188 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="195d353f-b5fb-48a4-a458-625ccede1eee" containerName="dnsmasq-dns" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.256384 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9a5e3c2-ae0c-4884-a48b-705bfda74cda" containerName="nova-cell1-novncproxy-novncproxy" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.256399 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22ca75a-372d-42f8-89ad-9a8c88546f58" containerName="nova-cell0-conductor-conductor" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.256422 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="195d353f-b5fb-48a4-a458-625ccede1eee" containerName="dnsmasq-dns" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.257659 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.259657 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.266462 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.275561 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.276938 4941 scope.go:117] "RemoveContainer" containerID="be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.277717 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.284778 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.286033 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 30 08:13:51 crc kubenswrapper[4941]: E1130 08:13:51.286269 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667\": container with ID starting with be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667 not found: ID does not exist" containerID="be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.286296 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667"} err="failed to get container status \"be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667\": rpc error: code = NotFound desc = could not find container \"be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667\": container with ID starting with be0fb0e2e7286ec723980d15b7c42065d48f0df1d822be3992e7df3a079dd667 not found: ID does not exist" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.321223 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxkn6\" (UniqueName: \"kubernetes.io/projected/4b88b054-0071-4404-8749-7348030d171b-kube-api-access-cxkn6\") pod \"nova-cell1-novncproxy-0\" (UID: \"4b88b054-0071-4404-8749-7348030d171b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.321341 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b88b054-0071-4404-8749-7348030d171b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4b88b054-0071-4404-8749-7348030d171b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.321376 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b88b054-0071-4404-8749-7348030d171b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4b88b054-0071-4404-8749-7348030d171b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.425641 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr5jq\" (UniqueName: \"kubernetes.io/projected/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-kube-api-access-kr5jq\") pod \"nova-cell0-conductor-0\" (UID: \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\") " pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.425717 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\") " pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.425787 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b88b054-0071-4404-8749-7348030d171b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4b88b054-0071-4404-8749-7348030d171b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.425822 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b88b054-0071-4404-8749-7348030d171b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4b88b054-0071-4404-8749-7348030d171b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.425909 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\") " pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.425938 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxkn6\" (UniqueName: \"kubernetes.io/projected/4b88b054-0071-4404-8749-7348030d171b-kube-api-access-cxkn6\") pod \"nova-cell1-novncproxy-0\" (UID: \"4b88b054-0071-4404-8749-7348030d171b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.432080 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b88b054-0071-4404-8749-7348030d171b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4b88b054-0071-4404-8749-7348030d171b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.432623 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b88b054-0071-4404-8749-7348030d171b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4b88b054-0071-4404-8749-7348030d171b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.453870 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxkn6\" (UniqueName: \"kubernetes.io/projected/4b88b054-0071-4404-8749-7348030d171b-kube-api-access-cxkn6\") pod \"nova-cell1-novncproxy-0\" (UID: \"4b88b054-0071-4404-8749-7348030d171b\") " pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.527023 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\") " pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.527083 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr5jq\" (UniqueName: \"kubernetes.io/projected/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-kube-api-access-kr5jq\") pod \"nova-cell0-conductor-0\" (UID: \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\") " pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.527117 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\") " pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.531024 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\") " pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.532077 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\") " pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.533782 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9a5e3c2-ae0c-4884-a48b-705bfda74cda" path="/var/lib/kubelet/pods/a9a5e3c2-ae0c-4884-a48b-705bfda74cda/volumes" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.534354 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b22ca75a-372d-42f8-89ad-9a8c88546f58" path="/var/lib/kubelet/pods/b22ca75a-372d-42f8-89ad-9a8c88546f58/volumes" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.549839 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr5jq\" (UniqueName: \"kubernetes.io/projected/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-kube-api-access-kr5jq\") pod \"nova-cell0-conductor-0\" (UID: \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\") " pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.610380 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:13:51 crc kubenswrapper[4941]: I1130 08:13:51.621507 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:52 crc kubenswrapper[4941]: I1130 08:13:52.108513 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 30 08:13:52 crc kubenswrapper[4941]: I1130 08:13:52.158701 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"4b88b054-0071-4404-8749-7348030d171b","Type":"ContainerStarted","Data":"4e3aab9fca49aae3e67370f976e0b60076feac5196ecd282ffe3d31434b4e9f3"} Nov 30 08:13:52 crc kubenswrapper[4941]: I1130 08:13:52.197061 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 08:13:52 crc kubenswrapper[4941]: W1130 08:13:52.202375 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b3e4a1c_882b_4f48_a882_4dc8fe7eabf6.slice/crio-1a2c24acd376672a052fbc3e3d6fb001c932ae580492235f12f1fc24ebee5645 WatchSource:0}: Error finding container 1a2c24acd376672a052fbc3e3d6fb001c932ae580492235f12f1fc24ebee5645: Status 404 returned error can't find the container with id 1a2c24acd376672a052fbc3e3d6fb001c932ae580492235f12f1fc24ebee5645 Nov 30 08:13:52 crc kubenswrapper[4941]: I1130 08:13:52.546918 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 08:13:52 crc kubenswrapper[4941]: I1130 08:13:52.547573 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3" containerName="nova-cell1-conductor-conductor" containerID="cri-o://a9b146867e7abd5c9f5b366a6d5f68317337d409e73e56c52270e501ae1475af" gracePeriod=30 Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.198103 4941 generic.go:334] "Generic (PLEG): container finished" podID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" containerID="db89befe1a9d31d87ddb66aac310b14ab7e2b2257300dc83ef801b616616ca54" exitCode=0 Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.198406 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c6ac8f7-56ec-4d98-985e-c8571887dcb9","Type":"ContainerDied","Data":"db89befe1a9d31d87ddb66aac310b14ab7e2b2257300dc83ef801b616616ca54"} Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.200807 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6","Type":"ContainerStarted","Data":"da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942"} Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.200832 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6","Type":"ContainerStarted","Data":"1a2c24acd376672a052fbc3e3d6fb001c932ae580492235f12f1fc24ebee5645"} Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.201170 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.208985 4941 generic.go:334] "Generic (PLEG): container finished" podID="48be0b99-919f-463f-b80d-8d737b43a70b" containerID="23ed605cc707fb420bacbb8275741163005a6058a7a0bb3cdecf35773dc9d078" exitCode=0 Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.209182 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"48be0b99-919f-463f-b80d-8d737b43a70b","Type":"ContainerDied","Data":"23ed605cc707fb420bacbb8275741163005a6058a7a0bb3cdecf35773dc9d078"} Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.215469 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"4b88b054-0071-4404-8749-7348030d171b","Type":"ContainerStarted","Data":"9fd44aec0a964596c5bf009862cb986cd9222a859b541f0c7163d50dc2723d0e"} Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.225858 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.225838559 podStartE2EDuration="2.225838559s" podCreationTimestamp="2025-11-30 08:13:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:13:53.222901208 +0000 UTC m=+5253.991072817" watchObservedRunningTime="2025-11-30 08:13:53.225838559 +0000 UTC m=+5253.994010168" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.266423 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.2664008239999998 podStartE2EDuration="2.266400824s" podCreationTimestamp="2025-11-30 08:13:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:13:53.261865174 +0000 UTC m=+5254.030036783" watchObservedRunningTime="2025-11-30 08:13:53.266400824 +0000 UTC m=+5254.034572433" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.431416 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.444040 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.476794 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7cpk\" (UniqueName: \"kubernetes.io/projected/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-kube-api-access-t7cpk\") pod \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.477243 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-combined-ca-bundle\") pod \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.477454 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-config-data\") pod \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.477517 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2rgg\" (UniqueName: \"kubernetes.io/projected/48be0b99-919f-463f-b80d-8d737b43a70b-kube-api-access-g2rgg\") pod \"48be0b99-919f-463f-b80d-8d737b43a70b\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.477603 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48be0b99-919f-463f-b80d-8d737b43a70b-logs\") pod \"48be0b99-919f-463f-b80d-8d737b43a70b\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.477694 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48be0b99-919f-463f-b80d-8d737b43a70b-config-data\") pod \"48be0b99-919f-463f-b80d-8d737b43a70b\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.477759 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48be0b99-919f-463f-b80d-8d737b43a70b-combined-ca-bundle\") pod \"48be0b99-919f-463f-b80d-8d737b43a70b\" (UID: \"48be0b99-919f-463f-b80d-8d737b43a70b\") " Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.477792 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-logs\") pod \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\" (UID: \"9c6ac8f7-56ec-4d98-985e-c8571887dcb9\") " Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.482422 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48be0b99-919f-463f-b80d-8d737b43a70b-logs" (OuterVolumeSpecName: "logs") pod "48be0b99-919f-463f-b80d-8d737b43a70b" (UID: "48be0b99-919f-463f-b80d-8d737b43a70b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.483195 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-logs" (OuterVolumeSpecName: "logs") pod "9c6ac8f7-56ec-4d98-985e-c8571887dcb9" (UID: "9c6ac8f7-56ec-4d98-985e-c8571887dcb9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.496124 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-kube-api-access-t7cpk" (OuterVolumeSpecName: "kube-api-access-t7cpk") pod "9c6ac8f7-56ec-4d98-985e-c8571887dcb9" (UID: "9c6ac8f7-56ec-4d98-985e-c8571887dcb9"). InnerVolumeSpecName "kube-api-access-t7cpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.503901 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48be0b99-919f-463f-b80d-8d737b43a70b-kube-api-access-g2rgg" (OuterVolumeSpecName: "kube-api-access-g2rgg") pod "48be0b99-919f-463f-b80d-8d737b43a70b" (UID: "48be0b99-919f-463f-b80d-8d737b43a70b"). InnerVolumeSpecName "kube-api-access-g2rgg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.541065 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c6ac8f7-56ec-4d98-985e-c8571887dcb9" (UID: "9c6ac8f7-56ec-4d98-985e-c8571887dcb9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.555605 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48be0b99-919f-463f-b80d-8d737b43a70b-config-data" (OuterVolumeSpecName: "config-data") pod "48be0b99-919f-463f-b80d-8d737b43a70b" (UID: "48be0b99-919f-463f-b80d-8d737b43a70b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.556181 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-config-data" (OuterVolumeSpecName: "config-data") pod "9c6ac8f7-56ec-4d98-985e-c8571887dcb9" (UID: "9c6ac8f7-56ec-4d98-985e-c8571887dcb9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.579667 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.579699 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2rgg\" (UniqueName: \"kubernetes.io/projected/48be0b99-919f-463f-b80d-8d737b43a70b-kube-api-access-g2rgg\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.579713 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48be0b99-919f-463f-b80d-8d737b43a70b-logs\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.579723 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48be0b99-919f-463f-b80d-8d737b43a70b-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.579734 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-logs\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.579746 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7cpk\" (UniqueName: \"kubernetes.io/projected/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-kube-api-access-t7cpk\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.579755 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c6ac8f7-56ec-4d98-985e-c8571887dcb9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.582587 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48be0b99-919f-463f-b80d-8d737b43a70b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48be0b99-919f-463f-b80d-8d737b43a70b" (UID: "48be0b99-919f-463f-b80d-8d737b43a70b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.686131 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48be0b99-919f-463f-b80d-8d737b43a70b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:53 crc kubenswrapper[4941]: E1130 08:13:53.689075 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a9b146867e7abd5c9f5b366a6d5f68317337d409e73e56c52270e501ae1475af" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 30 08:13:53 crc kubenswrapper[4941]: E1130 08:13:53.692561 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a9b146867e7abd5c9f5b366a6d5f68317337d409e73e56c52270e501ae1475af" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 30 08:13:53 crc kubenswrapper[4941]: E1130 08:13:53.702041 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a9b146867e7abd5c9f5b366a6d5f68317337d409e73e56c52270e501ae1475af" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 30 08:13:53 crc kubenswrapper[4941]: E1130 08:13:53.702150 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3" containerName="nova-cell1-conductor-conductor" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.810064 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.890528 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-config-data\") pod \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\" (UID: \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\") " Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.890776 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-combined-ca-bundle\") pod \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\" (UID: \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\") " Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.890904 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6bvl\" (UniqueName: \"kubernetes.io/projected/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-kube-api-access-j6bvl\") pod \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\" (UID: \"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea\") " Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.896939 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-kube-api-access-j6bvl" (OuterVolumeSpecName: "kube-api-access-j6bvl") pod "6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea" (UID: "6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea"). InnerVolumeSpecName "kube-api-access-j6bvl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.919880 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea" (UID: "6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.921104 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-config-data" (OuterVolumeSpecName: "config-data") pod "6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea" (UID: "6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.994198 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.994242 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:53 crc kubenswrapper[4941]: I1130 08:13:53.994255 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6bvl\" (UniqueName: \"kubernetes.io/projected/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea-kube-api-access-j6bvl\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.235003 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9c6ac8f7-56ec-4d98-985e-c8571887dcb9","Type":"ContainerDied","Data":"3b98280ce9ccff212947bb91587bd21de0b9258a53152aa24900f61b0cab2c33"} Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.235562 4941 scope.go:117] "RemoveContainer" containerID="db89befe1a9d31d87ddb66aac310b14ab7e2b2257300dc83ef801b616616ca54" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.234985 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.245733 4941 generic.go:334] "Generic (PLEG): container finished" podID="6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea" containerID="8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5" exitCode=0 Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.245879 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea","Type":"ContainerDied","Data":"8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5"} Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.245928 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea","Type":"ContainerDied","Data":"c578cbc2ae17963dee63b54613389c1165bd3973777b6daf4208985d94daa307"} Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.246036 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.254546 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"48be0b99-919f-463f-b80d-8d737b43a70b","Type":"ContainerDied","Data":"d7aa162b479b9dabff054a473a95943e9043ca9320658de22e4b31a33cc38372"} Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.254658 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.286946 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.306819 4941 scope.go:117] "RemoveContainer" containerID="926e052d09277df688e2ccb6fc589fb80443d0b11fcbead3f28c910a019567ac" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.314497 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.332472 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 30 08:13:54 crc kubenswrapper[4941]: E1130 08:13:54.332981 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" containerName="nova-api-api" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.332998 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" containerName="nova-api-api" Nov 30 08:13:54 crc kubenswrapper[4941]: E1130 08:13:54.333014 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" containerName="nova-api-log" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.333020 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" containerName="nova-api-log" Nov 30 08:13:54 crc kubenswrapper[4941]: E1130 08:13:54.333036 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48be0b99-919f-463f-b80d-8d737b43a70b" containerName="nova-metadata-metadata" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.333042 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="48be0b99-919f-463f-b80d-8d737b43a70b" containerName="nova-metadata-metadata" Nov 30 08:13:54 crc kubenswrapper[4941]: E1130 08:13:54.333056 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea" containerName="nova-scheduler-scheduler" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.333062 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea" containerName="nova-scheduler-scheduler" Nov 30 08:13:54 crc kubenswrapper[4941]: E1130 08:13:54.333078 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48be0b99-919f-463f-b80d-8d737b43a70b" containerName="nova-metadata-log" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.333084 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="48be0b99-919f-463f-b80d-8d737b43a70b" containerName="nova-metadata-log" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.333292 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="48be0b99-919f-463f-b80d-8d737b43a70b" containerName="nova-metadata-log" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.333303 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" 
containerName="nova-api-log" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.333313 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="48be0b99-919f-463f-b80d-8d737b43a70b" containerName="nova-metadata-metadata" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.333340 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea" containerName="nova-scheduler-scheduler" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.333353 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" containerName="nova-api-api" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.346936 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.362447 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.363128 4941 scope.go:117] "RemoveContainer" containerID="8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.395730 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.419441 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.431305 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd10ed18-796a-4734-843c-9e2ae2973797-config-data\") pod \"nova-api-0\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " pod="openstack/nova-api-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.431448 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd10ed18-796a-4734-843c-9e2ae2973797-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " pod="openstack/nova-api-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.431481 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd10ed18-796a-4734-843c-9e2ae2973797-logs\") pod \"nova-api-0\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " pod="openstack/nova-api-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.431750 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvprl\" (UniqueName: \"kubernetes.io/projected/cd10ed18-796a-4734-843c-9e2ae2973797-kube-api-access-mvprl\") pod \"nova-api-0\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " pod="openstack/nova-api-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.434495 4941 scope.go:117] "RemoveContainer" containerID="8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5" Nov 30 08:13:54 crc kubenswrapper[4941]: E1130 08:13:54.437799 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5\": container with ID starting with 8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5 not found: ID does not exist" 
containerID="8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.437869 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5"} err="failed to get container status \"8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5\": rpc error: code = NotFound desc = could not find container \"8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5\": container with ID starting with 8d735789bc329bf90547b0e5eaceed83076651b0ae29f8c0652cf0a6b7061bf5 not found: ID does not exist" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.437908 4941 scope.go:117] "RemoveContainer" containerID="23ed605cc707fb420bacbb8275741163005a6058a7a0bb3cdecf35773dc9d078" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.448004 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.461517 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.466922 4941 scope.go:117] "RemoveContainer" containerID="d50d2061a1e8afb52fd48ecb439a4f86d9f04f93eca330b76817ea6e475d0ec4" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.472303 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.482062 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.484877 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.498467 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.500223 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.504880 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.505600 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.536517 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.536877 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npj85\" (UniqueName: \"kubernetes.io/projected/a356bfee-e892-474f-8f09-b179804dced1-kube-api-access-npj85\") pod \"nova-scheduler-0\" (UID: \"a356bfee-e892-474f-8f09-b179804dced1\") " pod="openstack/nova-scheduler-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.536971 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a356bfee-e892-474f-8f09-b179804dced1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a356bfee-e892-474f-8f09-b179804dced1\") " pod="openstack/nova-scheduler-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.537003 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a356bfee-e892-474f-8f09-b179804dced1-config-data\") pod \"nova-scheduler-0\" (UID: \"a356bfee-e892-474f-8f09-b179804dced1\") " pod="openstack/nova-scheduler-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.537029 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvprl\" (UniqueName: \"kubernetes.io/projected/cd10ed18-796a-4734-843c-9e2ae2973797-kube-api-access-mvprl\") pod \"nova-api-0\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " pod="openstack/nova-api-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.537055 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7f3b443-db5d-4635-b028-9adf4c846090-config-data\") pod \"nova-metadata-0\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " pod="openstack/nova-metadata-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.537072 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlb6v\" (UniqueName: \"kubernetes.io/projected/d7f3b443-db5d-4635-b028-9adf4c846090-kube-api-access-nlb6v\") pod \"nova-metadata-0\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " pod="openstack/nova-metadata-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.537102 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd10ed18-796a-4734-843c-9e2ae2973797-config-data\") pod \"nova-api-0\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " pod="openstack/nova-api-0" Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.540084 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7f3b443-db5d-4635-b028-9adf4c846090-logs\") pod \"nova-metadata-0\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " pod="openstack/nova-metadata-0" 
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.540192 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd10ed18-796a-4734-843c-9e2ae2973797-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " pod="openstack/nova-api-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.540227 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd10ed18-796a-4734-843c-9e2ae2973797-logs\") pod \"nova-api-0\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " pod="openstack/nova-api-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.540298 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7f3b443-db5d-4635-b028-9adf4c846090-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " pod="openstack/nova-metadata-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.543842 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd10ed18-796a-4734-843c-9e2ae2973797-logs\") pod \"nova-api-0\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " pod="openstack/nova-api-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.553517 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd10ed18-796a-4734-843c-9e2ae2973797-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " pod="openstack/nova-api-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.553796 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd10ed18-796a-4734-843c-9e2ae2973797-config-data\") pod \"nova-api-0\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " pod="openstack/nova-api-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.554743 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.560929 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvprl\" (UniqueName: \"kubernetes.io/projected/cd10ed18-796a-4734-843c-9e2ae2973797-kube-api-access-mvprl\") pod \"nova-api-0\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " pod="openstack/nova-api-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.644790 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a356bfee-e892-474f-8f09-b179804dced1-config-data\") pod \"nova-scheduler-0\" (UID: \"a356bfee-e892-474f-8f09-b179804dced1\") " pod="openstack/nova-scheduler-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.645173 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7f3b443-db5d-4635-b028-9adf4c846090-config-data\") pod \"nova-metadata-0\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " pod="openstack/nova-metadata-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.645206 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlb6v\" (UniqueName: \"kubernetes.io/projected/d7f3b443-db5d-4635-b028-9adf4c846090-kube-api-access-nlb6v\") pod \"nova-metadata-0\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " pod="openstack/nova-metadata-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.645263 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7f3b443-db5d-4635-b028-9adf4c846090-logs\") pod \"nova-metadata-0\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " pod="openstack/nova-metadata-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.645314 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7f3b443-db5d-4635-b028-9adf4c846090-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " pod="openstack/nova-metadata-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.645385 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npj85\" (UniqueName: \"kubernetes.io/projected/a356bfee-e892-474f-8f09-b179804dced1-kube-api-access-npj85\") pod \"nova-scheduler-0\" (UID: \"a356bfee-e892-474f-8f09-b179804dced1\") " pod="openstack/nova-scheduler-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.645439 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a356bfee-e892-474f-8f09-b179804dced1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a356bfee-e892-474f-8f09-b179804dced1\") " pod="openstack/nova-scheduler-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.646546 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7f3b443-db5d-4635-b028-9adf4c846090-logs\") pod \"nova-metadata-0\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " pod="openstack/nova-metadata-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.649517 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7f3b443-db5d-4635-b028-9adf4c846090-config-data\") pod \"nova-metadata-0\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " pod="openstack/nova-metadata-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.650022 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a356bfee-e892-474f-8f09-b179804dced1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a356bfee-e892-474f-8f09-b179804dced1\") " pod="openstack/nova-scheduler-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.650997 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7f3b443-db5d-4635-b028-9adf4c846090-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " pod="openstack/nova-metadata-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.652978 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a356bfee-e892-474f-8f09-b179804dced1-config-data\") pod \"nova-scheduler-0\" (UID: \"a356bfee-e892-474f-8f09-b179804dced1\") " pod="openstack/nova-scheduler-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.661976 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlb6v\" (UniqueName: \"kubernetes.io/projected/d7f3b443-db5d-4635-b028-9adf4c846090-kube-api-access-nlb6v\") pod \"nova-metadata-0\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " pod="openstack/nova-metadata-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.663009 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npj85\" (UniqueName: \"kubernetes.io/projected/a356bfee-e892-474f-8f09-b179804dced1-kube-api-access-npj85\") pod \"nova-scheduler-0\" (UID: \"a356bfee-e892-474f-8f09-b179804dced1\") " pod="openstack/nova-scheduler-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.727293 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.837803 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 30 08:13:54 crc kubenswrapper[4941]: I1130 08:13:54.849011 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 30 08:13:55 crc kubenswrapper[4941]: I1130 08:13:55.136020 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 08:13:55 crc kubenswrapper[4941]: I1130 08:13:55.209662 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 30 08:13:55 crc kubenswrapper[4941]: W1130 08:13:55.212687 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd10ed18_796a_4734_843c_9e2ae2973797.slice/crio-1a45fc4c0967f6969afde9445bd72ce8d514e93a181cdf2237b77f58c477a6d8 WatchSource:0}: Error finding container 1a45fc4c0967f6969afde9445bd72ce8d514e93a181cdf2237b77f58c477a6d8: Status 404 returned error can't find the container with id 1a45fc4c0967f6969afde9445bd72ce8d514e93a181cdf2237b77f58c477a6d8
Nov 30 08:13:55 crc kubenswrapper[4941]: I1130 08:13:55.268866 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cd10ed18-796a-4734-843c-9e2ae2973797","Type":"ContainerStarted","Data":"1a45fc4c0967f6969afde9445bd72ce8d514e93a181cdf2237b77f58c477a6d8"}
Nov 30 08:13:55 crc kubenswrapper[4941]: I1130 08:13:55.270727 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d7f3b443-db5d-4635-b028-9adf4c846090","Type":"ContainerStarted","Data":"3451bd08021b0805f87341f2183b981750a4bc4a5a0d6cc1a85728c48a189ce4"}
Nov 30 08:13:55 crc kubenswrapper[4941]: I1130 08:13:55.407489 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 30 08:13:55 crc kubenswrapper[4941]: I1130 08:13:55.544080 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48be0b99-919f-463f-b80d-8d737b43a70b" path="/var/lib/kubelet/pods/48be0b99-919f-463f-b80d-8d737b43a70b/volumes"
Nov 30 08:13:55 crc kubenswrapper[4941]: I1130 08:13:55.545199 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea" path="/var/lib/kubelet/pods/6123f4f9-4f7d-4c8c-bef2-7d6a05ff03ea/volumes"
Nov 30 08:13:55 crc kubenswrapper[4941]: I1130 08:13:55.546612 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c6ac8f7-56ec-4d98-985e-c8571887dcb9" path="/var/lib/kubelet/pods/9c6ac8f7-56ec-4d98-985e-c8571887dcb9/volumes"
event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a356bfee-e892-474f-8f09-b179804dced1","Type":"ContainerStarted","Data":"ab9e48f3ad9dce33268571c7b0f0f0dd5019375ec245be8dd96210695e589c24"} Nov 30 08:13:56 crc kubenswrapper[4941]: I1130 08:13:56.289178 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a356bfee-e892-474f-8f09-b179804dced1","Type":"ContainerStarted","Data":"7d7d91de554e3c0334b9929fb642df346128dc62d009d5c54cc279fecdf30120"} Nov 30 08:13:56 crc kubenswrapper[4941]: I1130 08:13:56.292719 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d7f3b443-db5d-4635-b028-9adf4c846090","Type":"ContainerStarted","Data":"cfe8c2f296dd4667d96f5ff72fdb4c53478ea9c52589a33990517ef81a7b560a"} Nov 30 08:13:56 crc kubenswrapper[4941]: I1130 08:13:56.292750 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d7f3b443-db5d-4635-b028-9adf4c846090","Type":"ContainerStarted","Data":"403dcee0e54f75fc1e836211d7158dfd0dc692b8fbe61a00b31f70ec6f2f3c4d"} Nov 30 08:13:56 crc kubenswrapper[4941]: I1130 08:13:56.295504 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cd10ed18-796a-4734-843c-9e2ae2973797","Type":"ContainerStarted","Data":"af07f6fcebb8548eeb4f1bdb32a12056f3c2f9352865b6cbdd49adb22ef4d104"} Nov 30 08:13:56 crc kubenswrapper[4941]: I1130 08:13:56.295540 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cd10ed18-796a-4734-843c-9e2ae2973797","Type":"ContainerStarted","Data":"db663f0cce711c93efe883f85529d4ed3ff32c34a52b403b693e93a0a5794ae1"} Nov 30 08:13:56 crc kubenswrapper[4941]: I1130 08:13:56.320882 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.320857501 podStartE2EDuration="2.320857501s" podCreationTimestamp="2025-11-30 08:13:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:13:56.311692987 +0000 UTC m=+5257.079864596" watchObservedRunningTime="2025-11-30 08:13:56.320857501 +0000 UTC m=+5257.089029110" Nov 30 08:13:56 crc kubenswrapper[4941]: I1130 08:13:56.348173 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.348153095 podStartE2EDuration="2.348153095s" podCreationTimestamp="2025-11-30 08:13:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:13:56.340115807 +0000 UTC m=+5257.108287416" watchObservedRunningTime="2025-11-30 08:13:56.348153095 +0000 UTC m=+5257.116324704" Nov 30 08:13:56 crc kubenswrapper[4941]: I1130 08:13:56.367656 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.367637368 podStartE2EDuration="2.367637368s" podCreationTimestamp="2025-11-30 08:13:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:13:56.361460458 +0000 UTC m=+5257.129632077" watchObservedRunningTime="2025-11-30 08:13:56.367637368 +0000 UTC m=+5257.135808977" Nov 30 08:13:56 crc kubenswrapper[4941]: I1130 08:13:56.611063 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 30 
08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.317459 4941 generic.go:334] "Generic (PLEG): container finished" podID="e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3" containerID="a9b146867e7abd5c9f5b366a6d5f68317337d409e73e56c52270e501ae1475af" exitCode=0 Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.317554 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3","Type":"ContainerDied","Data":"a9b146867e7abd5c9f5b366a6d5f68317337d409e73e56c52270e501ae1475af"} Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.317885 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3","Type":"ContainerDied","Data":"8a3010ffc703425f2b86d9e6a2e780576e2ef506d2680929c301fdd5b6dc3355"} Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.317905 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a3010ffc703425f2b86d9e6a2e780576e2ef506d2680929c301fdd5b6dc3355" Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.354835 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.432707 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-config-data\") pod \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\" (UID: \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\") " Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.432988 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chn5v\" (UniqueName: \"kubernetes.io/projected/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-kube-api-access-chn5v\") pod \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\" (UID: \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\") " Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.433031 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-combined-ca-bundle\") pod \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\" (UID: \"e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3\") " Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.451657 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-kube-api-access-chn5v" (OuterVolumeSpecName: "kube-api-access-chn5v") pod "e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3" (UID: "e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3"). InnerVolumeSpecName "kube-api-access-chn5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.460238 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-config-data" (OuterVolumeSpecName: "config-data") pod "e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3" (UID: "e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.484239 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3" (UID: "e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.534940 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chn5v\" (UniqueName: \"kubernetes.io/projected/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-kube-api-access-chn5v\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.534985 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:58 crc kubenswrapper[4941]: I1130 08:13:58.534995 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.326775 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.372302 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.381862 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.403624 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 08:13:59 crc kubenswrapper[4941]: E1130 08:13:59.407896 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3" containerName="nova-cell1-conductor-conductor" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.407931 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3" containerName="nova-cell1-conductor-conductor" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.408173 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3" containerName="nova-cell1-conductor-conductor" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.408958 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.412145 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.417470 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.452129 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kplpx\" (UniqueName: \"kubernetes.io/projected/b0f67deb-a45e-4c98-ae10-793d43722433-kube-api-access-kplpx\") pod \"nova-cell1-conductor-0\" (UID: \"b0f67deb-a45e-4c98-ae10-793d43722433\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.452249 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0f67deb-a45e-4c98-ae10-793d43722433-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b0f67deb-a45e-4c98-ae10-793d43722433\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.452306 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0f67deb-a45e-4c98-ae10-793d43722433-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b0f67deb-a45e-4c98-ae10-793d43722433\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.532757 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3" path="/var/lib/kubelet/pods/e7e651ef-aedd-4a09-aeeb-f8dc37bda5d3/volumes" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.554070 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kplpx\" (UniqueName: \"kubernetes.io/projected/b0f67deb-a45e-4c98-ae10-793d43722433-kube-api-access-kplpx\") pod \"nova-cell1-conductor-0\" (UID: \"b0f67deb-a45e-4c98-ae10-793d43722433\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.554168 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0f67deb-a45e-4c98-ae10-793d43722433-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b0f67deb-a45e-4c98-ae10-793d43722433\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.554224 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0f67deb-a45e-4c98-ae10-793d43722433-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b0f67deb-a45e-4c98-ae10-793d43722433\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.562393 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0f67deb-a45e-4c98-ae10-793d43722433-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b0f67deb-a45e-4c98-ae10-793d43722433\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.563042 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b0f67deb-a45e-4c98-ae10-793d43722433-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b0f67deb-a45e-4c98-ae10-793d43722433\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.570687 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kplpx\" (UniqueName: \"kubernetes.io/projected/b0f67deb-a45e-4c98-ae10-793d43722433-kube-api-access-kplpx\") pod \"nova-cell1-conductor-0\" (UID: \"b0f67deb-a45e-4c98-ae10-793d43722433\") " pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.734557 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.839260 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.840299 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 30 08:13:59 crc kubenswrapper[4941]: I1130 08:13:59.849814 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 30 08:14:00 crc kubenswrapper[4941]: I1130 08:14:00.278370 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 08:14:00 crc kubenswrapper[4941]: I1130 08:14:00.342789 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b0f67deb-a45e-4c98-ae10-793d43722433","Type":"ContainerStarted","Data":"c030ba71005f520b777d0a4873c316ab1e7b5b117da69d40b7f2bf457345a952"} Nov 30 08:14:01 crc kubenswrapper[4941]: I1130 08:14:01.358103 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b0f67deb-a45e-4c98-ae10-793d43722433","Type":"ContainerStarted","Data":"60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020"} Nov 30 08:14:01 crc kubenswrapper[4941]: I1130 08:14:01.358693 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 30 08:14:01 crc kubenswrapper[4941]: I1130 08:14:01.382636 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.382611194 podStartE2EDuration="2.382611194s" podCreationTimestamp="2025-11-30 08:13:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:14:01.377181196 +0000 UTC m=+5262.145352815" watchObservedRunningTime="2025-11-30 08:14:01.382611194 +0000 UTC m=+5262.150782843" Nov 30 08:14:01 crc kubenswrapper[4941]: I1130 08:14:01.611808 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:14:01 crc kubenswrapper[4941]: I1130 08:14:01.678550 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 30 08:14:01 crc kubenswrapper[4941]: I1130 08:14:01.745652 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:14:02 crc kubenswrapper[4941]: I1130 08:14:02.376788 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Nov 30 08:14:02 crc kubenswrapper[4941]: I1130 08:14:02.978755 4941 
patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:14:02 crc kubenswrapper[4941]: I1130 08:14:02.978823 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:14:04 crc kubenswrapper[4941]: I1130 08:14:04.728210 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 30 08:14:04 crc kubenswrapper[4941]: I1130 08:14:04.728775 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 30 08:14:04 crc kubenswrapper[4941]: I1130 08:14:04.838890 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 30 08:14:04 crc kubenswrapper[4941]: I1130 08:14:04.838950 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 30 08:14:04 crc kubenswrapper[4941]: I1130 08:14:04.849715 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 30 08:14:04 crc kubenswrapper[4941]: I1130 08:14:04.885408 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 30 08:14:05 crc kubenswrapper[4941]: I1130 08:14:05.433425 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 30 08:14:05 crc kubenswrapper[4941]: I1130 08:14:05.811507 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cd10ed18-796a-4734-843c-9e2ae2973797" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.76:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 08:14:05 crc kubenswrapper[4941]: I1130 08:14:05.811511 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cd10ed18-796a-4734-843c-9e2ae2973797" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.76:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 08:14:05 crc kubenswrapper[4941]: I1130 08:14:05.923534 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d7f3b443-db5d-4635-b028-9adf4c846090" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.77:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 08:14:05 crc kubenswrapper[4941]: I1130 08:14:05.923613 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d7f3b443-db5d-4635-b028-9adf4c846090" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.77:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 30 08:14:09 crc kubenswrapper[4941]: I1130 08:14:09.772801 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 30 08:14:09 crc kubenswrapper[4941]: I1130 
08:14:09.877806 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 08:14:09 crc kubenswrapper[4941]: I1130 08:14:09.881430 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 30 08:14:09 crc kubenswrapper[4941]: I1130 08:14:09.885824 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 30 08:14:09 crc kubenswrapper[4941]: I1130 08:14:09.902140 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 08:14:09 crc kubenswrapper[4941]: I1130 08:14:09.914292 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-scripts\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:09 crc kubenswrapper[4941]: I1130 08:14:09.914447 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-config-data\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:09 crc kubenswrapper[4941]: I1130 08:14:09.914579 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvhj9\" (UniqueName: \"kubernetes.io/projected/8163a913-96b4-4caf-a8c4-fb681de860e6-kube-api-access-rvhj9\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:09 crc kubenswrapper[4941]: I1130 08:14:09.914730 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:09 crc kubenswrapper[4941]: I1130 08:14:09.914912 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8163a913-96b4-4caf-a8c4-fb681de860e6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:09 crc kubenswrapper[4941]: I1130 08:14:09.915264 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.016993 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvhj9\" (UniqueName: \"kubernetes.io/projected/8163a913-96b4-4caf-a8c4-fb681de860e6-kube-api-access-rvhj9\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.017050 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.017093 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8163a913-96b4-4caf-a8c4-fb681de860e6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.017142 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.017184 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-scripts\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.017236 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-config-data\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.017271 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8163a913-96b4-4caf-a8c4-fb681de860e6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.026644 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.027237 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-config-data\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.035760 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-scripts\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.037711 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.044567 4941 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rvhj9\" (UniqueName: \"kubernetes.io/projected/8163a913-96b4-4caf-a8c4-fb681de860e6-kube-api-access-rvhj9\") pod \"cinder-scheduler-0\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.232646 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 30 08:14:10 crc kubenswrapper[4941]: I1130 08:14:10.875589 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 08:14:11 crc kubenswrapper[4941]: I1130 08:14:11.495043 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8163a913-96b4-4caf-a8c4-fb681de860e6","Type":"ContainerStarted","Data":"962aed0d033da7b8d55d85a4da90ecc2e764350341e6add76e3a0e77e67abaf9"} Nov 30 08:14:11 crc kubenswrapper[4941]: I1130 08:14:11.563949 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 30 08:14:11 crc kubenswrapper[4941]: I1130 08:14:11.564305 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="ed051ad7-48ac-415d-b3b2-e58662aa030c" containerName="cinder-api-log" containerID="cri-o://f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a" gracePeriod=30 Nov 30 08:14:11 crc kubenswrapper[4941]: I1130 08:14:11.564479 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="ed051ad7-48ac-415d-b3b2-e58662aa030c" containerName="cinder-api" containerID="cri-o://41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3" gracePeriod=30 Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.267020 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.270117 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.279849 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.295183 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.392611 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.392663 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.392684 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.393206 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f52fb817-2c20-4482-a999-7780b6902951-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.393349 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-run\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.393385 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.393412 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52fb817-2c20-4482-a999-7780b6902951-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.393583 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-sys\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: 
I1130 08:14:12.393661 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.393716 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f52fb817-2c20-4482-a999-7780b6902951-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.393810 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.393910 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52fb817-2c20-4482-a999-7780b6902951-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.393971 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6ddm\" (UniqueName: \"kubernetes.io/projected/f52fb817-2c20-4482-a999-7780b6902951-kube-api-access-j6ddm\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.393997 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-dev\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.394118 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f52fb817-2c20-4482-a999-7780b6902951-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.394191 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.495990 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-run\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496064 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496093 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52fb817-2c20-4482-a999-7780b6902951-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496131 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-sys\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496131 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-run\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496154 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496259 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-sys\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496401 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496401 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f52fb817-2c20-4482-a999-7780b6902951-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496483 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496519 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: 
\"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496526 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52fb817-2c20-4482-a999-7780b6902951-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496558 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496611 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6ddm\" (UniqueName: \"kubernetes.io/projected/f52fb817-2c20-4482-a999-7780b6902951-kube-api-access-j6ddm\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496642 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-dev\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496682 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f52fb817-2c20-4482-a999-7780b6902951-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496723 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-dev\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496729 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496754 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.496994 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.497055 4941 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.497096 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.497409 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f52fb817-2c20-4482-a999-7780b6902951-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.497503 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.497570 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.497680 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/f52fb817-2c20-4482-a999-7780b6902951-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.504756 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f52fb817-2c20-4482-a999-7780b6902951-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.506361 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f52fb817-2c20-4482-a999-7780b6902951-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.507312 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f52fb817-2c20-4482-a999-7780b6902951-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.516204 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f52fb817-2c20-4482-a999-7780b6902951-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 
08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.518555 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f52fb817-2c20-4482-a999-7780b6902951-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.519527 4941 generic.go:334] "Generic (PLEG): container finished" podID="ed051ad7-48ac-415d-b3b2-e58662aa030c" containerID="f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a" exitCode=143 Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.519586 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ed051ad7-48ac-415d-b3b2-e58662aa030c","Type":"ContainerDied","Data":"f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a"} Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.522149 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8163a913-96b4-4caf-a8c4-fb681de860e6","Type":"ContainerStarted","Data":"8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084"} Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.523199 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6ddm\" (UniqueName: \"kubernetes.io/projected/f52fb817-2c20-4482-a999-7780b6902951-kube-api-access-j6ddm\") pod \"cinder-volume-volume1-0\" (UID: \"f52fb817-2c20-4482-a999-7780b6902951\") " pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.614277 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.866527 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.868932 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.874263 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.910931 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-config-data\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911004 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-sys\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911046 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911183 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911209 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-scripts\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911233 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-lib-modules\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911255 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-etc-nvme\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911285 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-run\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911319 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " 
pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911364 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911661 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-ceph\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911693 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911722 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-dev\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.911778 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqrsh\" (UniqueName: \"kubernetes.io/projected/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-kube-api-access-kqrsh\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.914270 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-config-data-custom\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.914367 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:12 crc kubenswrapper[4941]: I1130 08:14:12.923991 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016376 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-run\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016441 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 
30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016474 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016546 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-ceph\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016584 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016604 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-dev\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016639 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqrsh\" (UniqueName: \"kubernetes.io/projected/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-kube-api-access-kqrsh\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016677 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-config-data-custom\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016697 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016725 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-config-data\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016749 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-sys\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016768 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " 
pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016799 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016819 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-scripts\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016842 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-lib-modules\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016863 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-etc-nvme\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.016970 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-etc-nvme\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.017165 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-run\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.017216 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.017563 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.017705 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-dev\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.017857 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-sys\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 
08:14:13.018106 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-lib-modules\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.018147 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.018759 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.018791 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.026624 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-config-data-custom\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.026949 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-ceph\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.027202 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-scripts\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.027571 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-config-data\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.028558 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.035970 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqrsh\" (UniqueName: \"kubernetes.io/projected/4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7-kube-api-access-kqrsh\") pod \"cinder-backup-0\" (UID: \"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7\") " pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.200690 4941 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.377125 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.538803 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"f52fb817-2c20-4482-a999-7780b6902951","Type":"ContainerStarted","Data":"e61c6af658dee98d74af7cda4a5814e90e05187f82d615a8a1bb713ede09d405"} Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.541164 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8163a913-96b4-4caf-a8c4-fb681de860e6","Type":"ContainerStarted","Data":"25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa"} Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.577473 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.294625237 podStartE2EDuration="4.57743713s" podCreationTimestamp="2025-11-30 08:14:09 +0000 UTC" firstStartedPulling="2025-11-30 08:14:10.88120768 +0000 UTC m=+5271.649379289" lastFinishedPulling="2025-11-30 08:14:11.164019573 +0000 UTC m=+5271.932191182" observedRunningTime="2025-11-30 08:14:13.56291 +0000 UTC m=+5274.331081619" watchObservedRunningTime="2025-11-30 08:14:13.57743713 +0000 UTC m=+5274.345608749" Nov 30 08:14:13 crc kubenswrapper[4941]: I1130 08:14:13.868254 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Nov 30 08:14:13 crc kubenswrapper[4941]: W1130 08:14:13.878072 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b1e8173_fb77_47d7_8a0f_4d7fc86cd9f7.slice/crio-8b445967a62d9d3b4882952e22fb3f4cc3d831e3f2b0375797707a218bbaa217 WatchSource:0}: Error finding container 8b445967a62d9d3b4882952e22fb3f4cc3d831e3f2b0375797707a218bbaa217: Status 404 returned error can't find the container with id 8b445967a62d9d3b4882952e22fb3f4cc3d831e3f2b0375797707a218bbaa217 Nov 30 08:14:14 crc kubenswrapper[4941]: I1130 08:14:14.554283 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7","Type":"ContainerStarted","Data":"8b445967a62d9d3b4882952e22fb3f4cc3d831e3f2b0375797707a218bbaa217"} Nov 30 08:14:14 crc kubenswrapper[4941]: I1130 08:14:14.556625 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"f52fb817-2c20-4482-a999-7780b6902951","Type":"ContainerStarted","Data":"f34a280cb4dacc0eb25f66e6fd89ef1edf95eae448a333fc8ba4bf3e115a45dd"} Nov 30 08:14:14 crc kubenswrapper[4941]: I1130 08:14:14.732488 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 30 08:14:14 crc kubenswrapper[4941]: I1130 08:14:14.732896 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="ed051ad7-48ac-415d-b3b2-e58662aa030c" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.73:8776/healthcheck\": read tcp 10.217.0.2:59912->10.217.1.73:8776: read: connection reset by peer" Nov 30 08:14:14 crc kubenswrapper[4941]: I1130 08:14:14.733773 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 30 08:14:14 crc kubenswrapper[4941]: I1130 08:14:14.736279 4941 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 30 08:14:14 crc kubenswrapper[4941]: I1130 08:14:14.740387 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 30 08:14:14 crc kubenswrapper[4941]: I1130 08:14:14.842734 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 30 08:14:14 crc kubenswrapper[4941]: I1130 08:14:14.845545 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 30 08:14:14 crc kubenswrapper[4941]: I1130 08:14:14.846721 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.136004 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.176226 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-scripts\") pod \"ed051ad7-48ac-415d-b3b2-e58662aa030c\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.176398 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdjzd\" (UniqueName: \"kubernetes.io/projected/ed051ad7-48ac-415d-b3b2-e58662aa030c-kube-api-access-wdjzd\") pod \"ed051ad7-48ac-415d-b3b2-e58662aa030c\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.176544 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-config-data\") pod \"ed051ad7-48ac-415d-b3b2-e58662aa030c\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.176619 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-config-data-custom\") pod \"ed051ad7-48ac-415d-b3b2-e58662aa030c\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.176688 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-combined-ca-bundle\") pod \"ed051ad7-48ac-415d-b3b2-e58662aa030c\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.176729 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed051ad7-48ac-415d-b3b2-e58662aa030c-logs\") pod \"ed051ad7-48ac-415d-b3b2-e58662aa030c\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.176779 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed051ad7-48ac-415d-b3b2-e58662aa030c-etc-machine-id\") pod \"ed051ad7-48ac-415d-b3b2-e58662aa030c\" (UID: \"ed051ad7-48ac-415d-b3b2-e58662aa030c\") " Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.177474 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/ed051ad7-48ac-415d-b3b2-e58662aa030c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ed051ad7-48ac-415d-b3b2-e58662aa030c" (UID: "ed051ad7-48ac-415d-b3b2-e58662aa030c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.179717 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed051ad7-48ac-415d-b3b2-e58662aa030c-logs" (OuterVolumeSpecName: "logs") pod "ed051ad7-48ac-415d-b3b2-e58662aa030c" (UID: "ed051ad7-48ac-415d-b3b2-e58662aa030c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.189704 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ed051ad7-48ac-415d-b3b2-e58662aa030c" (UID: "ed051ad7-48ac-415d-b3b2-e58662aa030c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.203219 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-scripts" (OuterVolumeSpecName: "scripts") pod "ed051ad7-48ac-415d-b3b2-e58662aa030c" (UID: "ed051ad7-48ac-415d-b3b2-e58662aa030c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.203694 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed051ad7-48ac-415d-b3b2-e58662aa030c-kube-api-access-wdjzd" (OuterVolumeSpecName: "kube-api-access-wdjzd") pod "ed051ad7-48ac-415d-b3b2-e58662aa030c" (UID: "ed051ad7-48ac-415d-b3b2-e58662aa030c"). InnerVolumeSpecName "kube-api-access-wdjzd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.235055 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.281539 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.281577 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed051ad7-48ac-415d-b3b2-e58662aa030c-logs\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.281588 4941 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed051ad7-48ac-415d-b3b2-e58662aa030c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.281599 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.281610 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdjzd\" (UniqueName: \"kubernetes.io/projected/ed051ad7-48ac-415d-b3b2-e58662aa030c-kube-api-access-wdjzd\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.344769 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed051ad7-48ac-415d-b3b2-e58662aa030c" (UID: "ed051ad7-48ac-415d-b3b2-e58662aa030c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.349729 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-config-data" (OuterVolumeSpecName: "config-data") pod "ed051ad7-48ac-415d-b3b2-e58662aa030c" (UID: "ed051ad7-48ac-415d-b3b2-e58662aa030c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.383905 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.383954 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed051ad7-48ac-415d-b3b2-e58662aa030c-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.577761 4941 generic.go:334] "Generic (PLEG): container finished" podID="ed051ad7-48ac-415d-b3b2-e58662aa030c" containerID="41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3" exitCode=0 Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.578247 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.578273 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ed051ad7-48ac-415d-b3b2-e58662aa030c","Type":"ContainerDied","Data":"41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3"} Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.580983 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ed051ad7-48ac-415d-b3b2-e58662aa030c","Type":"ContainerDied","Data":"8e38e8f8a425ac404471f74d7ba05a1c7606887d2bf4ec97459ea71c57a1307e"} Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.581022 4941 scope.go:117] "RemoveContainer" containerID="41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.600193 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7","Type":"ContainerStarted","Data":"92448a94fd9b239df8723b6739abb952dc040e13a4de5bd7cbe7f5a120394bdc"} Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.600925 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7","Type":"ContainerStarted","Data":"9fa3fc5c0950d007a6f26eac8b3e7db366076efa8d0ea00dd907bf002537c4a9"} Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.634436 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"f52fb817-2c20-4482-a999-7780b6902951","Type":"ContainerStarted","Data":"360acf3435bca982bb9dabc6a4199ea9a75ff1bc0445720ce5ad5e15e5e910d1"} Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.634897 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.650171 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.650741 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.652761 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.177910646 podStartE2EDuration="3.652743962s" podCreationTimestamp="2025-11-30 08:14:12 +0000 UTC" firstStartedPulling="2025-11-30 08:14:13.880577212 +0000 UTC m=+5274.648748811" lastFinishedPulling="2025-11-30 08:14:14.355410518 +0000 UTC m=+5275.123582127" observedRunningTime="2025-11-30 08:14:15.645841378 +0000 UTC m=+5276.414012997" watchObservedRunningTime="2025-11-30 08:14:15.652743962 +0000 UTC m=+5276.420915561" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.662008 4941 scope.go:117] "RemoveContainer" containerID="f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.675277 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=3.2801221480000002 podStartE2EDuration="3.675206577s" podCreationTimestamp="2025-11-30 08:14:12 +0000 UTC" firstStartedPulling="2025-11-30 08:14:13.389677148 +0000 UTC m=+5274.157848757" lastFinishedPulling="2025-11-30 08:14:13.784761567 +0000 UTC m=+5274.552933186" observedRunningTime="2025-11-30 08:14:15.671677728 +0000 UTC 
m=+5276.439849347" watchObservedRunningTime="2025-11-30 08:14:15.675206577 +0000 UTC m=+5276.443378186" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.704787 4941 scope.go:117] "RemoveContainer" containerID="41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3" Nov 30 08:14:15 crc kubenswrapper[4941]: E1130 08:14:15.720831 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3\": container with ID starting with 41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3 not found: ID does not exist" containerID="41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.720884 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3"} err="failed to get container status \"41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3\": rpc error: code = NotFound desc = could not find container \"41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3\": container with ID starting with 41d1cbd85bbc3aed776a505d5bfadb63af88392c61955487fe55d8f5abcecbf3 not found: ID does not exist" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.720914 4941 scope.go:117] "RemoveContainer" containerID="f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.738461 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 30 08:14:15 crc kubenswrapper[4941]: E1130 08:14:15.746784 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a\": container with ID starting with f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a not found: ID does not exist" containerID="f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.746831 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a"} err="failed to get container status \"f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a\": rpc error: code = NotFound desc = could not find container \"f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a\": container with ID starting with f3e3c974f3dbd19dd063f75fb24b8f9965383284e6564d0463003827f3c2bb2a not found: ID does not exist" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.765421 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.775611 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 30 08:14:15 crc kubenswrapper[4941]: E1130 08:14:15.776279 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed051ad7-48ac-415d-b3b2-e58662aa030c" containerName="cinder-api-log" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.776307 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed051ad7-48ac-415d-b3b2-e58662aa030c" containerName="cinder-api-log" Nov 30 08:14:15 crc kubenswrapper[4941]: E1130 08:14:15.776454 4941 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ed051ad7-48ac-415d-b3b2-e58662aa030c" containerName="cinder-api" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.776474 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed051ad7-48ac-415d-b3b2-e58662aa030c" containerName="cinder-api" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.776704 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed051ad7-48ac-415d-b3b2-e58662aa030c" containerName="cinder-api-log" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.776725 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed051ad7-48ac-415d-b3b2-e58662aa030c" containerName="cinder-api" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.778155 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.797687 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.798513 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-config-data\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.798572 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4hpn\" (UniqueName: \"kubernetes.io/projected/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-kube-api-access-l4hpn\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.798614 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-logs\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.798702 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-scripts\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.798724 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-config-data-custom\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.798781 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.798809 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-etc-machine-id\") pod \"cinder-api-0\" (UID: 
\"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.822362 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.899877 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-scripts\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.899936 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-config-data-custom\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.899990 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.900018 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.900074 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-config-data\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.900105 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4hpn\" (UniqueName: \"kubernetes.io/projected/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-kube-api-access-l4hpn\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.900142 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-logs\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.900894 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-logs\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.902741 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.908523 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-scripts\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.908998 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-config-data-custom\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.910502 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.917505 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-config-data\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:15 crc kubenswrapper[4941]: I1130 08:14:15.932090 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4hpn\" (UniqueName: \"kubernetes.io/projected/f9d9e3e8-f265-4ef9-9f1f-9e66b80be876-kube-api-access-l4hpn\") pod \"cinder-api-0\" (UID: \"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876\") " pod="openstack/cinder-api-0" Nov 30 08:14:16 crc kubenswrapper[4941]: I1130 08:14:16.146694 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 30 08:14:16 crc kubenswrapper[4941]: I1130 08:14:16.684404 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 30 08:14:17 crc kubenswrapper[4941]: I1130 08:14:17.542265 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed051ad7-48ac-415d-b3b2-e58662aa030c" path="/var/lib/kubelet/pods/ed051ad7-48ac-415d-b3b2-e58662aa030c/volumes" Nov 30 08:14:17 crc kubenswrapper[4941]: I1130 08:14:17.615454 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:17 crc kubenswrapper[4941]: I1130 08:14:17.665749 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876","Type":"ContainerStarted","Data":"8c69b76c658d37e14dcba4f0f29917a7b7934e6a3dd0e525ccd026232b2d462c"} Nov 30 08:14:17 crc kubenswrapper[4941]: I1130 08:14:17.665792 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876","Type":"ContainerStarted","Data":"6dd29b844eade9edf13f72e71123bb187ec338826fe03446782e3d55123085dd"} Nov 30 08:14:18 crc kubenswrapper[4941]: I1130 08:14:18.203954 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Nov 30 08:14:18 crc kubenswrapper[4941]: I1130 08:14:18.679549 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"f9d9e3e8-f265-4ef9-9f1f-9e66b80be876","Type":"ContainerStarted","Data":"0c3f60bd03d122f663498cd57e26482981dab22b9423caacba4439618f6eb304"} Nov 30 08:14:18 crc kubenswrapper[4941]: I1130 08:14:18.680635 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/cinder-api-0" Nov 30 08:14:18 crc kubenswrapper[4941]: I1130 08:14:18.705983 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.7059665 podStartE2EDuration="3.7059665s" podCreationTimestamp="2025-11-30 08:14:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:14:18.704182066 +0000 UTC m=+5279.472353685" watchObservedRunningTime="2025-11-30 08:14:18.7059665 +0000 UTC m=+5279.474138099" Nov 30 08:14:20 crc kubenswrapper[4941]: I1130 08:14:20.473285 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 30 08:14:20 crc kubenswrapper[4941]: I1130 08:14:20.539505 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 08:14:20 crc kubenswrapper[4941]: I1130 08:14:20.716198 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="8163a913-96b4-4caf-a8c4-fb681de860e6" containerName="cinder-scheduler" containerID="cri-o://8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084" gracePeriod=30 Nov 30 08:14:20 crc kubenswrapper[4941]: I1130 08:14:20.716375 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="8163a913-96b4-4caf-a8c4-fb681de860e6" containerName="probe" containerID="cri-o://25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa" gracePeriod=30 Nov 30 08:14:21 crc kubenswrapper[4941]: I1130 08:14:21.731602 4941 generic.go:334] "Generic (PLEG): container finished" podID="8163a913-96b4-4caf-a8c4-fb681de860e6" containerID="25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa" exitCode=0 Nov 30 08:14:21 crc kubenswrapper[4941]: I1130 08:14:21.731702 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8163a913-96b4-4caf-a8c4-fb681de860e6","Type":"ContainerDied","Data":"25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa"} Nov 30 08:14:22 crc kubenswrapper[4941]: I1130 08:14:22.939505 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Nov 30 08:14:23 crc kubenswrapper[4941]: E1130 08:14:23.205023 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8163a913_96b4_4caf_a8c4_fb681de860e6.slice/crio-8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084.scope\": RecentStats: unable to find data in memory cache]" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.424802 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.513481 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.703197 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-config-data-custom\") pod \"8163a913-96b4-4caf-a8c4-fb681de860e6\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.703567 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-scripts\") pod \"8163a913-96b4-4caf-a8c4-fb681de860e6\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.703683 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvhj9\" (UniqueName: \"kubernetes.io/projected/8163a913-96b4-4caf-a8c4-fb681de860e6-kube-api-access-rvhj9\") pod \"8163a913-96b4-4caf-a8c4-fb681de860e6\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.703860 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-combined-ca-bundle\") pod \"8163a913-96b4-4caf-a8c4-fb681de860e6\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.704366 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-config-data\") pod \"8163a913-96b4-4caf-a8c4-fb681de860e6\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.704578 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8163a913-96b4-4caf-a8c4-fb681de860e6-etc-machine-id\") pod \"8163a913-96b4-4caf-a8c4-fb681de860e6\" (UID: \"8163a913-96b4-4caf-a8c4-fb681de860e6\") " Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.706981 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8163a913-96b4-4caf-a8c4-fb681de860e6-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8163a913-96b4-4caf-a8c4-fb681de860e6" (UID: "8163a913-96b4-4caf-a8c4-fb681de860e6"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.715686 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8163a913-96b4-4caf-a8c4-fb681de860e6-kube-api-access-rvhj9" (OuterVolumeSpecName: "kube-api-access-rvhj9") pod "8163a913-96b4-4caf-a8c4-fb681de860e6" (UID: "8163a913-96b4-4caf-a8c4-fb681de860e6"). InnerVolumeSpecName "kube-api-access-rvhj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.717963 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-scripts" (OuterVolumeSpecName: "scripts") pod "8163a913-96b4-4caf-a8c4-fb681de860e6" (UID: "8163a913-96b4-4caf-a8c4-fb681de860e6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.720737 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8163a913-96b4-4caf-a8c4-fb681de860e6" (UID: "8163a913-96b4-4caf-a8c4-fb681de860e6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.751445 4941 generic.go:334] "Generic (PLEG): container finished" podID="8163a913-96b4-4caf-a8c4-fb681de860e6" containerID="8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084" exitCode=0 Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.751485 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8163a913-96b4-4caf-a8c4-fb681de860e6","Type":"ContainerDied","Data":"8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084"} Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.751512 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8163a913-96b4-4caf-a8c4-fb681de860e6","Type":"ContainerDied","Data":"962aed0d033da7b8d55d85a4da90ecc2e764350341e6add76e3a0e77e67abaf9"} Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.751529 4941 scope.go:117] "RemoveContainer" containerID="25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.751665 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.792863 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8163a913-96b4-4caf-a8c4-fb681de860e6" (UID: "8163a913-96b4-4caf-a8c4-fb681de860e6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.806173 4941 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.806227 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.806239 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvhj9\" (UniqueName: \"kubernetes.io/projected/8163a913-96b4-4caf-a8c4-fb681de860e6-kube-api-access-rvhj9\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.806254 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.806268 4941 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8163a913-96b4-4caf-a8c4-fb681de860e6-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.816148 4941 scope.go:117] "RemoveContainer" containerID="8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.839822 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-config-data" (OuterVolumeSpecName: "config-data") pod "8163a913-96b4-4caf-a8c4-fb681de860e6" (UID: "8163a913-96b4-4caf-a8c4-fb681de860e6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.841567 4941 scope.go:117] "RemoveContainer" containerID="25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa" Nov 30 08:14:23 crc kubenswrapper[4941]: E1130 08:14:23.842103 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa\": container with ID starting with 25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa not found: ID does not exist" containerID="25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.842149 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa"} err="failed to get container status \"25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa\": rpc error: code = NotFound desc = could not find container \"25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa\": container with ID starting with 25e6039e343deaf0f7c0a6f4b3a709531bd66ab2e5758bb6f5406340f23394aa not found: ID does not exist" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.842179 4941 scope.go:117] "RemoveContainer" containerID="8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084" Nov 30 08:14:23 crc kubenswrapper[4941]: E1130 08:14:23.842605 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084\": container with ID starting with 8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084 not found: ID does not exist" containerID="8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.842640 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084"} err="failed to get container status \"8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084\": rpc error: code = NotFound desc = could not find container \"8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084\": container with ID starting with 8e26571e9c9aedd5ec089196dd23adbd552312b0bb43704834f2823e566a7084 not found: ID does not exist" Nov 30 08:14:23 crc kubenswrapper[4941]: I1130 08:14:23.909754 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8163a913-96b4-4caf-a8c4-fb681de860e6-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.152488 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.171517 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.186554 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 08:14:24 crc kubenswrapper[4941]: E1130 08:14:24.187767 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8163a913-96b4-4caf-a8c4-fb681de860e6" containerName="cinder-scheduler" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.187810 4941 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="8163a913-96b4-4caf-a8c4-fb681de860e6" containerName="cinder-scheduler" Nov 30 08:14:24 crc kubenswrapper[4941]: E1130 08:14:24.187863 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8163a913-96b4-4caf-a8c4-fb681de860e6" containerName="probe" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.187876 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8163a913-96b4-4caf-a8c4-fb681de860e6" containerName="probe" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.188163 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="8163a913-96b4-4caf-a8c4-fb681de860e6" containerName="probe" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.188193 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="8163a913-96b4-4caf-a8c4-fb681de860e6" containerName="cinder-scheduler" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.189795 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.191588 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.208914 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.316277 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99967ec0-73ff-4130-9f6f-2287978e418c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.316400 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frhk4\" (UniqueName: \"kubernetes.io/projected/99967ec0-73ff-4130-9f6f-2287978e418c-kube-api-access-frhk4\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.316437 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99967ec0-73ff-4130-9f6f-2287978e418c-config-data\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.316532 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/99967ec0-73ff-4130-9f6f-2287978e418c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.316561 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99967ec0-73ff-4130-9f6f-2287978e418c-scripts\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.316596 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/99967ec0-73ff-4130-9f6f-2287978e418c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.418590 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99967ec0-73ff-4130-9f6f-2287978e418c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.418680 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frhk4\" (UniqueName: \"kubernetes.io/projected/99967ec0-73ff-4130-9f6f-2287978e418c-kube-api-access-frhk4\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.418709 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99967ec0-73ff-4130-9f6f-2287978e418c-config-data\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.418789 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/99967ec0-73ff-4130-9f6f-2287978e418c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.418809 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99967ec0-73ff-4130-9f6f-2287978e418c-scripts\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.419022 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/99967ec0-73ff-4130-9f6f-2287978e418c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.419111 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/99967ec0-73ff-4130-9f6f-2287978e418c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.425956 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99967ec0-73ff-4130-9f6f-2287978e418c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.426787 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/99967ec0-73ff-4130-9f6f-2287978e418c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 
08:14:24.426989 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99967ec0-73ff-4130-9f6f-2287978e418c-config-data\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.431892 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99967ec0-73ff-4130-9f6f-2287978e418c-scripts\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.439866 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frhk4\" (UniqueName: \"kubernetes.io/projected/99967ec0-73ff-4130-9f6f-2287978e418c-kube-api-access-frhk4\") pod \"cinder-scheduler-0\" (UID: \"99967ec0-73ff-4130-9f6f-2287978e418c\") " pod="openstack/cinder-scheduler-0" Nov 30 08:14:24 crc kubenswrapper[4941]: I1130 08:14:24.506145 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 30 08:14:25 crc kubenswrapper[4941]: I1130 08:14:25.037068 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 30 08:14:25 crc kubenswrapper[4941]: W1130 08:14:25.039901 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99967ec0_73ff_4130_9f6f_2287978e418c.slice/crio-16669f855a6f861a920d7edf70602ce0cdceafd777e5110ea732e2423eaa1ab4 WatchSource:0}: Error finding container 16669f855a6f861a920d7edf70602ce0cdceafd777e5110ea732e2423eaa1ab4: Status 404 returned error can't find the container with id 16669f855a6f861a920d7edf70602ce0cdceafd777e5110ea732e2423eaa1ab4 Nov 30 08:14:25 crc kubenswrapper[4941]: I1130 08:14:25.539822 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8163a913-96b4-4caf-a8c4-fb681de860e6" path="/var/lib/kubelet/pods/8163a913-96b4-4caf-a8c4-fb681de860e6/volumes" Nov 30 08:14:25 crc kubenswrapper[4941]: I1130 08:14:25.775809 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"99967ec0-73ff-4130-9f6f-2287978e418c","Type":"ContainerStarted","Data":"16669f855a6f861a920d7edf70602ce0cdceafd777e5110ea732e2423eaa1ab4"} Nov 30 08:14:26 crc kubenswrapper[4941]: I1130 08:14:26.790727 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"99967ec0-73ff-4130-9f6f-2287978e418c","Type":"ContainerStarted","Data":"455535facb1ce0a3a0d52be9b2b68bbfcbed839def59657b576aed9b43ea1c6a"} Nov 30 08:14:26 crc kubenswrapper[4941]: I1130 08:14:26.791919 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"99967ec0-73ff-4130-9f6f-2287978e418c","Type":"ContainerStarted","Data":"f58bbf67d7b9b4e41143477e1c401b682d1591b42a6a0d96314441ae188c6adc"} Nov 30 08:14:26 crc kubenswrapper[4941]: I1130 08:14:26.840864 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.8408329 podStartE2EDuration="2.8408329s" podCreationTimestamp="2025-11-30 08:14:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:14:26.830111447 +0000 UTC m=+5287.598283106" 
watchObservedRunningTime="2025-11-30 08:14:26.8408329 +0000 UTC m=+5287.609004549" Nov 30 08:14:28 crc kubenswrapper[4941]: I1130 08:14:28.130126 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 30 08:14:29 crc kubenswrapper[4941]: I1130 08:14:29.507181 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 30 08:14:31 crc kubenswrapper[4941]: I1130 08:14:31.094747 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-gxz8w"] Nov 30 08:14:31 crc kubenswrapper[4941]: I1130 08:14:31.108001 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-gxz8w"] Nov 30 08:14:31 crc kubenswrapper[4941]: I1130 08:14:31.543822 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92d378e1-3a85-473a-bcc8-e74c780db6ad" path="/var/lib/kubelet/pods/92d378e1-3a85-473a-bcc8-e74c780db6ad/volumes" Nov 30 08:14:32 crc kubenswrapper[4941]: I1130 08:14:32.040153 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7c0e-account-create-update-tjpdc"] Nov 30 08:14:32 crc kubenswrapper[4941]: I1130 08:14:32.049154 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-7c0e-account-create-update-tjpdc"] Nov 30 08:14:32 crc kubenswrapper[4941]: I1130 08:14:32.979105 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:14:32 crc kubenswrapper[4941]: I1130 08:14:32.979246 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:14:32 crc kubenswrapper[4941]: I1130 08:14:32.979361 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 08:14:32 crc kubenswrapper[4941]: I1130 08:14:32.980693 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 08:14:32 crc kubenswrapper[4941]: I1130 08:14:32.980800 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" gracePeriod=600 Nov 30 08:14:33 crc kubenswrapper[4941]: E1130 08:14:33.125227 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:14:33 crc kubenswrapper[4941]: I1130 08:14:33.540563 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3" path="/var/lib/kubelet/pods/7b11da1e-c7d3-4e7d-9f8c-c22fef5fd2f3/volumes" Nov 30 08:14:33 crc kubenswrapper[4941]: I1130 08:14:33.883360 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" exitCode=0 Nov 30 08:14:33 crc kubenswrapper[4941]: I1130 08:14:33.883368 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f"} Nov 30 08:14:33 crc kubenswrapper[4941]: I1130 08:14:33.883485 4941 scope.go:117] "RemoveContainer" containerID="b4e31e26d1d1b739fd99a437fefc2ed911d6c5ebbad46f7debdd4b9fe9b10aa8" Nov 30 08:14:33 crc kubenswrapper[4941]: I1130 08:14:33.884759 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:14:33 crc kubenswrapper[4941]: E1130 08:14:33.885272 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:14:34 crc kubenswrapper[4941]: I1130 08:14:34.770910 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 30 08:14:44 crc kubenswrapper[4941]: I1130 08:14:44.044249 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-v4vzc"] Nov 30 08:14:44 crc kubenswrapper[4941]: I1130 08:14:44.053487 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-v4vzc"] Nov 30 08:14:44 crc kubenswrapper[4941]: I1130 08:14:44.867358 4941 scope.go:117] "RemoveContainer" containerID="9441cbaf0a0fe0fee05416b01ac3ec9b9cf7226a62599facb991873adb9baf8a" Nov 30 08:14:44 crc kubenswrapper[4941]: I1130 08:14:44.914410 4941 scope.go:117] "RemoveContainer" containerID="7d3b5f5301cc1537df9edacec0ffd3adc15f300539709971c6f121a8c78d4e0e" Nov 30 08:14:45 crc kubenswrapper[4941]: I1130 08:14:45.543625 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9eddae14-af30-4521-a774-42753828eb43" path="/var/lib/kubelet/pods/9eddae14-af30-4521-a774-42753828eb43/volumes" Nov 30 08:14:46 crc kubenswrapper[4941]: I1130 08:14:46.522947 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:14:46 crc kubenswrapper[4941]: E1130 08:14:46.524200 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" 
podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.049204 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fg2q6"] Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.055551 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.062599 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg2q6"] Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.134194 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-catalog-content\") pod \"redhat-marketplace-fg2q6\" (UID: \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\") " pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.134265 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-utilities\") pod \"redhat-marketplace-fg2q6\" (UID: \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\") " pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.134488 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whx8f\" (UniqueName: \"kubernetes.io/projected/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-kube-api-access-whx8f\") pod \"redhat-marketplace-fg2q6\" (UID: \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\") " pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.236102 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whx8f\" (UniqueName: \"kubernetes.io/projected/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-kube-api-access-whx8f\") pod \"redhat-marketplace-fg2q6\" (UID: \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\") " pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.236204 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-catalog-content\") pod \"redhat-marketplace-fg2q6\" (UID: \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\") " pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.236238 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-utilities\") pod \"redhat-marketplace-fg2q6\" (UID: \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\") " pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.236887 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-utilities\") pod \"redhat-marketplace-fg2q6\" (UID: \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\") " pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.237576 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-catalog-content\") pod \"redhat-marketplace-fg2q6\" (UID: \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\") " pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.267502 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whx8f\" (UniqueName: \"kubernetes.io/projected/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-kube-api-access-whx8f\") pod \"redhat-marketplace-fg2q6\" (UID: \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\") " pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.391024 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:14:55 crc kubenswrapper[4941]: I1130 08:14:55.866979 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg2q6"] Nov 30 08:14:56 crc kubenswrapper[4941]: I1130 08:14:56.176396 4941 generic.go:334] "Generic (PLEG): container finished" podID="4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" containerID="d39c93607fbe16fdc0794eacc353c605b63bd2d01fc4b9be78b8266346146dd2" exitCode=0 Nov 30 08:14:56 crc kubenswrapper[4941]: I1130 08:14:56.176881 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2q6" event={"ID":"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a","Type":"ContainerDied","Data":"d39c93607fbe16fdc0794eacc353c605b63bd2d01fc4b9be78b8266346146dd2"} Nov 30 08:14:56 crc kubenswrapper[4941]: I1130 08:14:56.176994 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2q6" event={"ID":"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a","Type":"ContainerStarted","Data":"8fb4e3e02e8da757b486e7d9fa9c7c3b0cfced1097d119e6fded223e9fe0345f"} Nov 30 08:14:56 crc kubenswrapper[4941]: I1130 08:14:56.180791 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 08:14:57 crc kubenswrapper[4941]: I1130 08:14:57.197302 4941 generic.go:334] "Generic (PLEG): container finished" podID="4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" containerID="b5619be24c8de276cc643a542a0be347923a95d022761543235c2302d7cf4ec0" exitCode=0 Nov 30 08:14:57 crc kubenswrapper[4941]: I1130 08:14:57.197858 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2q6" event={"ID":"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a","Type":"ContainerDied","Data":"b5619be24c8de276cc643a542a0be347923a95d022761543235c2302d7cf4ec0"} Nov 30 08:14:58 crc kubenswrapper[4941]: I1130 08:14:58.056371 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-s68xn"] Nov 30 08:14:58 crc kubenswrapper[4941]: I1130 08:14:58.075924 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-s68xn"] Nov 30 08:14:58 crc kubenswrapper[4941]: I1130 08:14:58.213135 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2q6" event={"ID":"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a","Type":"ContainerStarted","Data":"61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0"} Nov 30 08:14:58 crc kubenswrapper[4941]: I1130 08:14:58.237061 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fg2q6" podStartSLOduration=1.642134134 podStartE2EDuration="3.237018547s" podCreationTimestamp="2025-11-30 
08:14:55 +0000 UTC" firstStartedPulling="2025-11-30 08:14:56.180401303 +0000 UTC m=+5316.948572922" lastFinishedPulling="2025-11-30 08:14:57.775285716 +0000 UTC m=+5318.543457335" observedRunningTime="2025-11-30 08:14:58.234304653 +0000 UTC m=+5319.002476262" watchObservedRunningTime="2025-11-30 08:14:58.237018547 +0000 UTC m=+5319.005190196" Nov 30 08:14:59 crc kubenswrapper[4941]: I1130 08:14:59.535285 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7977ff2-8f69-4b0a-9985-653796e2cd9f" path="/var/lib/kubelet/pods/a7977ff2-8f69-4b0a-9985-653796e2cd9f/volumes" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.161394 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w"] Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.163310 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.166675 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.172024 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.202815 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w"] Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.267468 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph795\" (UniqueName: \"kubernetes.io/projected/8033082e-7879-45b1-bf6d-0a8ebccef646-kube-api-access-ph795\") pod \"collect-profiles-29408175-wxv4w\" (UID: \"8033082e-7879-45b1-bf6d-0a8ebccef646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.267608 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8033082e-7879-45b1-bf6d-0a8ebccef646-secret-volume\") pod \"collect-profiles-29408175-wxv4w\" (UID: \"8033082e-7879-45b1-bf6d-0a8ebccef646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.267786 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8033082e-7879-45b1-bf6d-0a8ebccef646-config-volume\") pod \"collect-profiles-29408175-wxv4w\" (UID: \"8033082e-7879-45b1-bf6d-0a8ebccef646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.372562 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8033082e-7879-45b1-bf6d-0a8ebccef646-config-volume\") pod \"collect-profiles-29408175-wxv4w\" (UID: \"8033082e-7879-45b1-bf6d-0a8ebccef646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.372673 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph795\" (UniqueName: 
\"kubernetes.io/projected/8033082e-7879-45b1-bf6d-0a8ebccef646-kube-api-access-ph795\") pod \"collect-profiles-29408175-wxv4w\" (UID: \"8033082e-7879-45b1-bf6d-0a8ebccef646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.372782 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8033082e-7879-45b1-bf6d-0a8ebccef646-secret-volume\") pod \"collect-profiles-29408175-wxv4w\" (UID: \"8033082e-7879-45b1-bf6d-0a8ebccef646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.374146 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8033082e-7879-45b1-bf6d-0a8ebccef646-config-volume\") pod \"collect-profiles-29408175-wxv4w\" (UID: \"8033082e-7879-45b1-bf6d-0a8ebccef646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.380401 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8033082e-7879-45b1-bf6d-0a8ebccef646-secret-volume\") pod \"collect-profiles-29408175-wxv4w\" (UID: \"8033082e-7879-45b1-bf6d-0a8ebccef646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.392268 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph795\" (UniqueName: \"kubernetes.io/projected/8033082e-7879-45b1-bf6d-0a8ebccef646-kube-api-access-ph795\") pod \"collect-profiles-29408175-wxv4w\" (UID: \"8033082e-7879-45b1-bf6d-0a8ebccef646\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.485898 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:00 crc kubenswrapper[4941]: I1130 08:15:00.963514 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w"] Nov 30 08:15:01 crc kubenswrapper[4941]: I1130 08:15:01.249790 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" event={"ID":"8033082e-7879-45b1-bf6d-0a8ebccef646","Type":"ContainerStarted","Data":"71ebd700bebc680bcf6341a369cb2f1e7bcf789a3d9c1fc3d9c17d560fadc25f"} Nov 30 08:15:01 crc kubenswrapper[4941]: I1130 08:15:01.250206 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" event={"ID":"8033082e-7879-45b1-bf6d-0a8ebccef646","Type":"ContainerStarted","Data":"1e380242690f707582c474456a92b697568bc8b5f3710014f0b0aeac16073a69"} Nov 30 08:15:01 crc kubenswrapper[4941]: I1130 08:15:01.272029 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" podStartSLOduration=1.272006781 podStartE2EDuration="1.272006781s" podCreationTimestamp="2025-11-30 08:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:15:01.271079422 +0000 UTC m=+5322.039251031" watchObservedRunningTime="2025-11-30 08:15:01.272006781 +0000 UTC m=+5322.040178390" Nov 30 08:15:01 crc kubenswrapper[4941]: I1130 08:15:01.522200 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:15:01 crc kubenswrapper[4941]: E1130 08:15:01.522698 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:15:02 crc kubenswrapper[4941]: I1130 08:15:02.259814 4941 generic.go:334] "Generic (PLEG): container finished" podID="8033082e-7879-45b1-bf6d-0a8ebccef646" containerID="71ebd700bebc680bcf6341a369cb2f1e7bcf789a3d9c1fc3d9c17d560fadc25f" exitCode=0 Nov 30 08:15:02 crc kubenswrapper[4941]: I1130 08:15:02.259938 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" event={"ID":"8033082e-7879-45b1-bf6d-0a8ebccef646","Type":"ContainerDied","Data":"71ebd700bebc680bcf6341a369cb2f1e7bcf789a3d9c1fc3d9c17d560fadc25f"} Nov 30 08:15:03 crc kubenswrapper[4941]: I1130 08:15:03.734951 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:03 crc kubenswrapper[4941]: I1130 08:15:03.756027 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8033082e-7879-45b1-bf6d-0a8ebccef646-secret-volume\") pod \"8033082e-7879-45b1-bf6d-0a8ebccef646\" (UID: \"8033082e-7879-45b1-bf6d-0a8ebccef646\") " Nov 30 08:15:03 crc kubenswrapper[4941]: I1130 08:15:03.756126 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8033082e-7879-45b1-bf6d-0a8ebccef646-config-volume\") pod \"8033082e-7879-45b1-bf6d-0a8ebccef646\" (UID: \"8033082e-7879-45b1-bf6d-0a8ebccef646\") " Nov 30 08:15:03 crc kubenswrapper[4941]: I1130 08:15:03.756377 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ph795\" (UniqueName: \"kubernetes.io/projected/8033082e-7879-45b1-bf6d-0a8ebccef646-kube-api-access-ph795\") pod \"8033082e-7879-45b1-bf6d-0a8ebccef646\" (UID: \"8033082e-7879-45b1-bf6d-0a8ebccef646\") " Nov 30 08:15:03 crc kubenswrapper[4941]: I1130 08:15:03.758514 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8033082e-7879-45b1-bf6d-0a8ebccef646-config-volume" (OuterVolumeSpecName: "config-volume") pod "8033082e-7879-45b1-bf6d-0a8ebccef646" (UID: "8033082e-7879-45b1-bf6d-0a8ebccef646"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:15:03 crc kubenswrapper[4941]: I1130 08:15:03.765879 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8033082e-7879-45b1-bf6d-0a8ebccef646-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8033082e-7879-45b1-bf6d-0a8ebccef646" (UID: "8033082e-7879-45b1-bf6d-0a8ebccef646"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:15:03 crc kubenswrapper[4941]: I1130 08:15:03.767117 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8033082e-7879-45b1-bf6d-0a8ebccef646-kube-api-access-ph795" (OuterVolumeSpecName: "kube-api-access-ph795") pod "8033082e-7879-45b1-bf6d-0a8ebccef646" (UID: "8033082e-7879-45b1-bf6d-0a8ebccef646"). InnerVolumeSpecName "kube-api-access-ph795". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:15:03 crc kubenswrapper[4941]: I1130 08:15:03.859449 4941 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8033082e-7879-45b1-bf6d-0a8ebccef646-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 30 08:15:03 crc kubenswrapper[4941]: I1130 08:15:03.859506 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8033082e-7879-45b1-bf6d-0a8ebccef646-config-volume\") on node \"crc\" DevicePath \"\"" Nov 30 08:15:03 crc kubenswrapper[4941]: I1130 08:15:03.859521 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ph795\" (UniqueName: \"kubernetes.io/projected/8033082e-7879-45b1-bf6d-0a8ebccef646-kube-api-access-ph795\") on node \"crc\" DevicePath \"\"" Nov 30 08:15:04 crc kubenswrapper[4941]: I1130 08:15:04.281369 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" event={"ID":"8033082e-7879-45b1-bf6d-0a8ebccef646","Type":"ContainerDied","Data":"1e380242690f707582c474456a92b697568bc8b5f3710014f0b0aeac16073a69"} Nov 30 08:15:04 crc kubenswrapper[4941]: I1130 08:15:04.281432 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e380242690f707582c474456a92b697568bc8b5f3710014f0b0aeac16073a69" Nov 30 08:15:04 crc kubenswrapper[4941]: I1130 08:15:04.281516 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w" Nov 30 08:15:04 crc kubenswrapper[4941]: I1130 08:15:04.378097 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp"] Nov 30 08:15:04 crc kubenswrapper[4941]: I1130 08:15:04.394886 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408130-9lpnp"] Nov 30 08:15:05 crc kubenswrapper[4941]: I1130 08:15:05.391717 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:15:05 crc kubenswrapper[4941]: I1130 08:15:05.392559 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:15:05 crc kubenswrapper[4941]: I1130 08:15:05.462468 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:15:05 crc kubenswrapper[4941]: I1130 08:15:05.541592 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="415240af-6b19-4e94-b812-539d0b5c5cb4" path="/var/lib/kubelet/pods/415240af-6b19-4e94-b812-539d0b5c5cb4/volumes" Nov 30 08:15:06 crc kubenswrapper[4941]: I1130 08:15:06.378818 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:15:06 crc kubenswrapper[4941]: I1130 08:15:06.454360 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg2q6"] Nov 30 08:15:08 crc kubenswrapper[4941]: I1130 08:15:08.328587 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fg2q6" podUID="4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" containerName="registry-server" 
containerID="cri-o://61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0" gracePeriod=2 Nov 30 08:15:08 crc kubenswrapper[4941]: I1130 08:15:08.829976 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:15:08 crc kubenswrapper[4941]: I1130 08:15:08.874050 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-catalog-content\") pod \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\" (UID: \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\") " Nov 30 08:15:08 crc kubenswrapper[4941]: I1130 08:15:08.874929 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whx8f\" (UniqueName: \"kubernetes.io/projected/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-kube-api-access-whx8f\") pod \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\" (UID: \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\") " Nov 30 08:15:08 crc kubenswrapper[4941]: I1130 08:15:08.874971 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-utilities\") pod \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\" (UID: \"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a\") " Nov 30 08:15:08 crc kubenswrapper[4941]: I1130 08:15:08.876067 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-utilities" (OuterVolumeSpecName: "utilities") pod "4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" (UID: "4d0581e5-6853-407c-a7f0-3bc94e2f9d6a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:15:08 crc kubenswrapper[4941]: I1130 08:15:08.891471 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-kube-api-access-whx8f" (OuterVolumeSpecName: "kube-api-access-whx8f") pod "4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" (UID: "4d0581e5-6853-407c-a7f0-3bc94e2f9d6a"). InnerVolumeSpecName "kube-api-access-whx8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:15:08 crc kubenswrapper[4941]: I1130 08:15:08.910610 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" (UID: "4d0581e5-6853-407c-a7f0-3bc94e2f9d6a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:15:08 crc kubenswrapper[4941]: I1130 08:15:08.977907 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:15:08 crc kubenswrapper[4941]: I1130 08:15:08.977961 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whx8f\" (UniqueName: \"kubernetes.io/projected/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-kube-api-access-whx8f\") on node \"crc\" DevicePath \"\"" Nov 30 08:15:08 crc kubenswrapper[4941]: I1130 08:15:08.977973 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.354216 4941 generic.go:334] "Generic (PLEG): container finished" podID="4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" containerID="61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0" exitCode=0 Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.354286 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2q6" event={"ID":"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a","Type":"ContainerDied","Data":"61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0"} Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.354346 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fg2q6" event={"ID":"4d0581e5-6853-407c-a7f0-3bc94e2f9d6a","Type":"ContainerDied","Data":"8fb4e3e02e8da757b486e7d9fa9c7c3b0cfced1097d119e6fded223e9fe0345f"} Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.354388 4941 scope.go:117] "RemoveContainer" containerID="61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0" Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.354670 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fg2q6" Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.380347 4941 scope.go:117] "RemoveContainer" containerID="b5619be24c8de276cc643a542a0be347923a95d022761543235c2302d7cf4ec0" Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.420201 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg2q6"] Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.424232 4941 scope.go:117] "RemoveContainer" containerID="d39c93607fbe16fdc0794eacc353c605b63bd2d01fc4b9be78b8266346146dd2" Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.431790 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fg2q6"] Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.447720 4941 scope.go:117] "RemoveContainer" containerID="61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0" Nov 30 08:15:09 crc kubenswrapper[4941]: E1130 08:15:09.448446 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0\": container with ID starting with 61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0 not found: ID does not exist" containerID="61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0" Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.448526 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0"} err="failed to get container status \"61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0\": rpc error: code = NotFound desc = could not find container \"61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0\": container with ID starting with 61b41c10fdb04f04013d07eef48e84bd9bec72ffc04408c24d8fb6c8d9bbbab0 not found: ID does not exist" Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.448566 4941 scope.go:117] "RemoveContainer" containerID="b5619be24c8de276cc643a542a0be347923a95d022761543235c2302d7cf4ec0" Nov 30 08:15:09 crc kubenswrapper[4941]: E1130 08:15:09.449064 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5619be24c8de276cc643a542a0be347923a95d022761543235c2302d7cf4ec0\": container with ID starting with b5619be24c8de276cc643a542a0be347923a95d022761543235c2302d7cf4ec0 not found: ID does not exist" containerID="b5619be24c8de276cc643a542a0be347923a95d022761543235c2302d7cf4ec0" Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.449125 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5619be24c8de276cc643a542a0be347923a95d022761543235c2302d7cf4ec0"} err="failed to get container status \"b5619be24c8de276cc643a542a0be347923a95d022761543235c2302d7cf4ec0\": rpc error: code = NotFound desc = could not find container \"b5619be24c8de276cc643a542a0be347923a95d022761543235c2302d7cf4ec0\": container with ID starting with b5619be24c8de276cc643a542a0be347923a95d022761543235c2302d7cf4ec0 not found: ID does not exist" Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.449164 4941 scope.go:117] "RemoveContainer" containerID="d39c93607fbe16fdc0794eacc353c605b63bd2d01fc4b9be78b8266346146dd2" Nov 30 08:15:09 crc kubenswrapper[4941]: E1130 08:15:09.449536 4941 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d39c93607fbe16fdc0794eacc353c605b63bd2d01fc4b9be78b8266346146dd2\": container with ID starting with d39c93607fbe16fdc0794eacc353c605b63bd2d01fc4b9be78b8266346146dd2 not found: ID does not exist" containerID="d39c93607fbe16fdc0794eacc353c605b63bd2d01fc4b9be78b8266346146dd2" Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.449561 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d39c93607fbe16fdc0794eacc353c605b63bd2d01fc4b9be78b8266346146dd2"} err="failed to get container status \"d39c93607fbe16fdc0794eacc353c605b63bd2d01fc4b9be78b8266346146dd2\": rpc error: code = NotFound desc = could not find container \"d39c93607fbe16fdc0794eacc353c605b63bd2d01fc4b9be78b8266346146dd2\": container with ID starting with d39c93607fbe16fdc0794eacc353c605b63bd2d01fc4b9be78b8266346146dd2 not found: ID does not exist" Nov 30 08:15:09 crc kubenswrapper[4941]: I1130 08:15:09.536293 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" path="/var/lib/kubelet/pods/4d0581e5-6853-407c-a7f0-3bc94e2f9d6a/volumes" Nov 30 08:15:12 crc kubenswrapper[4941]: I1130 08:15:12.522481 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:15:12 crc kubenswrapper[4941]: E1130 08:15:12.523537 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:15:26 crc kubenswrapper[4941]: I1130 08:15:26.522866 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:15:26 crc kubenswrapper[4941]: E1130 08:15:26.524711 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:15:41 crc kubenswrapper[4941]: I1130 08:15:41.522211 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:15:41 crc kubenswrapper[4941]: E1130 08:15:41.523215 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:15:45 crc kubenswrapper[4941]: I1130 08:15:45.222669 4941 scope.go:117] "RemoveContainer" containerID="90e0fc2554a575e6716cdbb1cbf0424a809888eb5cd020263b299cfcb93d5a9d" Nov 30 08:15:45 crc kubenswrapper[4941]: I1130 08:15:45.324420 4941 scope.go:117] "RemoveContainer" 
containerID="121941d06a3111ca355fab13cefa45b71dab8d050c04c6ee313e241825e01acb" Nov 30 08:15:45 crc kubenswrapper[4941]: I1130 08:15:45.378813 4941 scope.go:117] "RemoveContainer" containerID="fc50957275e6897af789e5368e4507822ada6ff6d712ee4209a191fc3712f021" Nov 30 08:15:56 crc kubenswrapper[4941]: I1130 08:15:56.523268 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:15:56 crc kubenswrapper[4941]: E1130 08:15:56.526052 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.360102 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-c89774c7-5gqsp"] Nov 30 08:16:07 crc kubenswrapper[4941]: E1130 08:16:07.361312 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" containerName="extract-content" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.361342 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" containerName="extract-content" Nov 30 08:16:07 crc kubenswrapper[4941]: E1130 08:16:07.361363 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" containerName="registry-server" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.361369 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" containerName="registry-server" Nov 30 08:16:07 crc kubenswrapper[4941]: E1130 08:16:07.361379 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" containerName="extract-utilities" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.361386 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" containerName="extract-utilities" Nov 30 08:16:07 crc kubenswrapper[4941]: E1130 08:16:07.361405 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8033082e-7879-45b1-bf6d-0a8ebccef646" containerName="collect-profiles" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.361410 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8033082e-7879-45b1-bf6d-0a8ebccef646" containerName="collect-profiles" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.361600 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d0581e5-6853-407c-a7f0-3bc94e2f9d6a" containerName="registry-server" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.361627 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="8033082e-7879-45b1-bf6d-0a8ebccef646" containerName="collect-profiles" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.363031 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.369920 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.370073 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.370118 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.370315 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-vv5jp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.372871 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-c89774c7-5gqsp"] Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.407028 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.407386 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="88976074-47b8-4a90-9509-d2056016dcaa" containerName="glance-log" containerID="cri-o://23e89525e8ec1bbad26ebc0a81c674a22552a3370d24be1888065f2837044558" gracePeriod=30 Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.407560 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="88976074-47b8-4a90-9509-d2056016dcaa" containerName="glance-httpd" containerID="cri-o://032392ac773741ecded3844a05c9ccffc22f4028abdc9447f16aa480b6be2dd5" gracePeriod=30 Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.438151 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9776f793-a9cc-4ca4-9dc2-85d258cde161-scripts\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.438215 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9776f793-a9cc-4ca4-9dc2-85d258cde161-horizon-secret-key\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.438296 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9776f793-a9cc-4ca4-9dc2-85d258cde161-logs\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.438377 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q8sx\" (UniqueName: \"kubernetes.io/projected/9776f793-a9cc-4ca4-9dc2-85d258cde161-kube-api-access-7q8sx\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.438472 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/9776f793-a9cc-4ca4-9dc2-85d258cde161-config-data\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.465488 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7c8bc78fd7-bnkbz"] Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.467634 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.493414 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7c8bc78fd7-bnkbz"] Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.536131 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.536511 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d5a62427-4986-40d5-a842-9ac48dbd0a21" containerName="glance-log" containerID="cri-o://9d5134cc3c3a66dac5b1d8a9c588d774c803bba9d8bc23c51706ff1df64c0d6b" gracePeriod=30 Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.536912 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d5a62427-4986-40d5-a842-9ac48dbd0a21" containerName="glance-httpd" containerID="cri-o://565d0ba0f7bcd368e68e46139d3bb2d304a9ff85dc71366ee5c1e1dfa4c145f8" gracePeriod=30 Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.541253 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9776f793-a9cc-4ca4-9dc2-85d258cde161-scripts\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.541655 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9776f793-a9cc-4ca4-9dc2-85d258cde161-horizon-secret-key\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.541730 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-config-data\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.541753 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9776f793-a9cc-4ca4-9dc2-85d258cde161-logs\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.541778 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q8sx\" (UniqueName: \"kubernetes.io/projected/9776f793-a9cc-4ca4-9dc2-85d258cde161-kube-api-access-7q8sx\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.541809 4941 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8c5nj\" (UniqueName: \"kubernetes.io/projected/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-kube-api-access-8c5nj\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.541838 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-scripts\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.541862 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-horizon-secret-key\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.541901 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-logs\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.541934 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9776f793-a9cc-4ca4-9dc2-85d258cde161-config-data\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.544532 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9776f793-a9cc-4ca4-9dc2-85d258cde161-scripts\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.545705 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9776f793-a9cc-4ca4-9dc2-85d258cde161-logs\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.547630 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9776f793-a9cc-4ca4-9dc2-85d258cde161-config-data\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.553635 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9776f793-a9cc-4ca4-9dc2-85d258cde161-horizon-secret-key\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.568771 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q8sx\" (UniqueName: 
\"kubernetes.io/projected/9776f793-a9cc-4ca4-9dc2-85d258cde161-kube-api-access-7q8sx\") pod \"horizon-c89774c7-5gqsp\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") " pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.644225 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-horizon-secret-key\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.644375 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-logs\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.644665 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-config-data\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.644733 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8c5nj\" (UniqueName: \"kubernetes.io/projected/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-kube-api-access-8c5nj\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.644797 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-scripts\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.644868 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-logs\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.646122 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-scripts\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.646807 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-config-data\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.650061 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-horizon-secret-key\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 
30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.673459 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8c5nj\" (UniqueName: \"kubernetes.io/projected/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-kube-api-access-8c5nj\") pod \"horizon-7c8bc78fd7-bnkbz\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") " pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.689227 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:07 crc kubenswrapper[4941]: I1130 08:16:07.805504 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7c8bc78fd7-bnkbz" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.071920 4941 generic.go:334] "Generic (PLEG): container finished" podID="88976074-47b8-4a90-9509-d2056016dcaa" containerID="23e89525e8ec1bbad26ebc0a81c674a22552a3370d24be1888065f2837044558" exitCode=143 Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.072103 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"88976074-47b8-4a90-9509-d2056016dcaa","Type":"ContainerDied","Data":"23e89525e8ec1bbad26ebc0a81c674a22552a3370d24be1888065f2837044558"} Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.079395 4941 generic.go:334] "Generic (PLEG): container finished" podID="d5a62427-4986-40d5-a842-9ac48dbd0a21" containerID="9d5134cc3c3a66dac5b1d8a9c588d774c803bba9d8bc23c51706ff1df64c0d6b" exitCode=143 Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.079460 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d5a62427-4986-40d5-a842-9ac48dbd0a21","Type":"ContainerDied","Data":"9d5134cc3c3a66dac5b1d8a9c588d774c803bba9d8bc23c51706ff1df64c0d6b"} Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.161985 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7c8bc78fd7-bnkbz"] Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.206299 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-c89774c7-5gqsp"] Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.221291 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-849bf75bb9-frr7k"] Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.226174 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.257743 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-849bf75bb9-frr7k"] Nov 30 08:16:08 crc kubenswrapper[4941]: W1130 08:16:08.340783 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc29b9f4d_dde1_4a7d_93e7_1b66223aba61.slice/crio-f3786ca6127af7bd9ebaa96a30bb0e1ff28ef8a68bd8c1619240fc5a40dce959 WatchSource:0}: Error finding container f3786ca6127af7bd9ebaa96a30bb0e1ff28ef8a68bd8c1619240fc5a40dce959: Status 404 returned error can't find the container with id f3786ca6127af7bd9ebaa96a30bb0e1ff28ef8a68bd8c1619240fc5a40dce959 Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.342532 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7c8bc78fd7-bnkbz"] Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.362249 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-horizon-secret-key\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.362342 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-logs\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.362372 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6tpq\" (UniqueName: \"kubernetes.io/projected/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-kube-api-access-c6tpq\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.362403 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-scripts\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.362432 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-config-data\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.464231 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-horizon-secret-key\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.464403 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-logs\") pod 
\"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.464437 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6tpq\" (UniqueName: \"kubernetes.io/projected/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-kube-api-access-c6tpq\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.464469 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-scripts\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.464505 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-config-data\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.465170 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-logs\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.465745 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-scripts\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.466637 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-config-data\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.472457 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-horizon-secret-key\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.485365 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6tpq\" (UniqueName: \"kubernetes.io/projected/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-kube-api-access-c6tpq\") pod \"horizon-849bf75bb9-frr7k\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:08 crc kubenswrapper[4941]: I1130 08:16:08.550360 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:16:09 crc kubenswrapper[4941]: I1130 08:16:09.091476 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c8bc78fd7-bnkbz" event={"ID":"c29b9f4d-dde1-4a7d-93e7-1b66223aba61","Type":"ContainerStarted","Data":"f3786ca6127af7bd9ebaa96a30bb0e1ff28ef8a68bd8c1619240fc5a40dce959"} Nov 30 08:16:09 crc kubenswrapper[4941]: I1130 08:16:09.098105 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c89774c7-5gqsp" event={"ID":"9776f793-a9cc-4ca4-9dc2-85d258cde161","Type":"ContainerStarted","Data":"44e10722fa9218168e01a3233873cd546041aaaa0a1500452f607749ee657f6c"} Nov 30 08:16:09 crc kubenswrapper[4941]: I1130 08:16:09.158527 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-849bf75bb9-frr7k"] Nov 30 08:16:09 crc kubenswrapper[4941]: W1130 08:16:09.160498 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fea21ec_32aa_4c9a_8794_9fe7ca69e135.slice/crio-6acb3f7da21175d54dff2030c6021e7baa394f7fe43c17e3c58f754fabcf419d WatchSource:0}: Error finding container 6acb3f7da21175d54dff2030c6021e7baa394f7fe43c17e3c58f754fabcf419d: Status 404 returned error can't find the container with id 6acb3f7da21175d54dff2030c6021e7baa394f7fe43c17e3c58f754fabcf419d Nov 30 08:16:10 crc kubenswrapper[4941]: I1130 08:16:10.111654 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-849bf75bb9-frr7k" event={"ID":"4fea21ec-32aa-4c9a-8794-9fe7ca69e135","Type":"ContainerStarted","Data":"6acb3f7da21175d54dff2030c6021e7baa394f7fe43c17e3c58f754fabcf419d"} Nov 30 08:16:10 crc kubenswrapper[4941]: I1130 08:16:10.522827 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:16:10 crc kubenswrapper[4941]: E1130 08:16:10.523126 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.153519 4941 generic.go:334] "Generic (PLEG): container finished" podID="88976074-47b8-4a90-9509-d2056016dcaa" containerID="032392ac773741ecded3844a05c9ccffc22f4028abdc9447f16aa480b6be2dd5" exitCode=0 Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.153992 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"88976074-47b8-4a90-9509-d2056016dcaa","Type":"ContainerDied","Data":"032392ac773741ecded3844a05c9ccffc22f4028abdc9447f16aa480b6be2dd5"} Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.157508 4941 generic.go:334] "Generic (PLEG): container finished" podID="d5a62427-4986-40d5-a842-9ac48dbd0a21" containerID="565d0ba0f7bcd368e68e46139d3bb2d304a9ff85dc71366ee5c1e1dfa4c145f8" exitCode=0 Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.157541 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d5a62427-4986-40d5-a842-9ac48dbd0a21","Type":"ContainerDied","Data":"565d0ba0f7bcd368e68e46139d3bb2d304a9ff85dc71366ee5c1e1dfa4c145f8"} Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 
08:16:11.336605 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.346338 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435037 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4hss\" (UniqueName: \"kubernetes.io/projected/d5a62427-4986-40d5-a842-9ac48dbd0a21-kube-api-access-g4hss\") pod \"d5a62427-4986-40d5-a842-9ac48dbd0a21\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435169 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d5a62427-4986-40d5-a842-9ac48dbd0a21-ceph\") pod \"d5a62427-4986-40d5-a842-9ac48dbd0a21\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435239 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5a62427-4986-40d5-a842-9ac48dbd0a21-logs\") pod \"d5a62427-4986-40d5-a842-9ac48dbd0a21\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435267 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/88976074-47b8-4a90-9509-d2056016dcaa-logs\") pod \"88976074-47b8-4a90-9509-d2056016dcaa\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435286 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8clm\" (UniqueName: \"kubernetes.io/projected/88976074-47b8-4a90-9509-d2056016dcaa-kube-api-access-l8clm\") pod \"88976074-47b8-4a90-9509-d2056016dcaa\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435364 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/88976074-47b8-4a90-9509-d2056016dcaa-ceph\") pod \"88976074-47b8-4a90-9509-d2056016dcaa\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435417 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-scripts\") pod \"d5a62427-4986-40d5-a842-9ac48dbd0a21\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435438 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-combined-ca-bundle\") pod \"88976074-47b8-4a90-9509-d2056016dcaa\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435499 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-config-data\") pod \"d5a62427-4986-40d5-a842-9ac48dbd0a21\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435521 4941 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d5a62427-4986-40d5-a842-9ac48dbd0a21-httpd-run\") pod \"d5a62427-4986-40d5-a842-9ac48dbd0a21\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435539 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-combined-ca-bundle\") pod \"d5a62427-4986-40d5-a842-9ac48dbd0a21\" (UID: \"d5a62427-4986-40d5-a842-9ac48dbd0a21\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435555 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-scripts\") pod \"88976074-47b8-4a90-9509-d2056016dcaa\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435588 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/88976074-47b8-4a90-9509-d2056016dcaa-httpd-run\") pod \"88976074-47b8-4a90-9509-d2056016dcaa\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435631 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-config-data\") pod \"88976074-47b8-4a90-9509-d2056016dcaa\" (UID: \"88976074-47b8-4a90-9509-d2056016dcaa\") " Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.435665 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5a62427-4986-40d5-a842-9ac48dbd0a21-logs" (OuterVolumeSpecName: "logs") pod "d5a62427-4986-40d5-a842-9ac48dbd0a21" (UID: "d5a62427-4986-40d5-a842-9ac48dbd0a21"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.436028 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d5a62427-4986-40d5-a842-9ac48dbd0a21-logs\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.442951 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88976074-47b8-4a90-9509-d2056016dcaa-logs" (OuterVolumeSpecName: "logs") pod "88976074-47b8-4a90-9509-d2056016dcaa" (UID: "88976074-47b8-4a90-9509-d2056016dcaa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.445452 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88976074-47b8-4a90-9509-d2056016dcaa-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "88976074-47b8-4a90-9509-d2056016dcaa" (UID: "88976074-47b8-4a90-9509-d2056016dcaa"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.445560 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88976074-47b8-4a90-9509-d2056016dcaa-kube-api-access-l8clm" (OuterVolumeSpecName: "kube-api-access-l8clm") pod "88976074-47b8-4a90-9509-d2056016dcaa" (UID: "88976074-47b8-4a90-9509-d2056016dcaa"). InnerVolumeSpecName "kube-api-access-l8clm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.445940 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d5a62427-4986-40d5-a842-9ac48dbd0a21-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d5a62427-4986-40d5-a842-9ac48dbd0a21" (UID: "d5a62427-4986-40d5-a842-9ac48dbd0a21"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.446501 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5a62427-4986-40d5-a842-9ac48dbd0a21-kube-api-access-g4hss" (OuterVolumeSpecName: "kube-api-access-g4hss") pod "d5a62427-4986-40d5-a842-9ac48dbd0a21" (UID: "d5a62427-4986-40d5-a842-9ac48dbd0a21"). InnerVolumeSpecName "kube-api-access-g4hss". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.455586 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5a62427-4986-40d5-a842-9ac48dbd0a21-ceph" (OuterVolumeSpecName: "ceph") pod "d5a62427-4986-40d5-a842-9ac48dbd0a21" (UID: "d5a62427-4986-40d5-a842-9ac48dbd0a21"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.473032 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-scripts" (OuterVolumeSpecName: "scripts") pod "88976074-47b8-4a90-9509-d2056016dcaa" (UID: "88976074-47b8-4a90-9509-d2056016dcaa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.473146 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-scripts" (OuterVolumeSpecName: "scripts") pod "d5a62427-4986-40d5-a842-9ac48dbd0a21" (UID: "d5a62427-4986-40d5-a842-9ac48dbd0a21"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.473229 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88976074-47b8-4a90-9509-d2056016dcaa-ceph" (OuterVolumeSpecName: "ceph") pod "88976074-47b8-4a90-9509-d2056016dcaa" (UID: "88976074-47b8-4a90-9509-d2056016dcaa"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.480298 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d5a62427-4986-40d5-a842-9ac48dbd0a21" (UID: "d5a62427-4986-40d5-a842-9ac48dbd0a21"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.484518 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88976074-47b8-4a90-9509-d2056016dcaa" (UID: "88976074-47b8-4a90-9509-d2056016dcaa"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.508520 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-config-data" (OuterVolumeSpecName: "config-data") pod "88976074-47b8-4a90-9509-d2056016dcaa" (UID: "88976074-47b8-4a90-9509-d2056016dcaa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.520522 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-config-data" (OuterVolumeSpecName: "config-data") pod "d5a62427-4986-40d5-a842-9ac48dbd0a21" (UID: "d5a62427-4986-40d5-a842-9ac48dbd0a21"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.538046 4941 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/88976074-47b8-4a90-9509-d2056016dcaa-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.538080 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.538090 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4hss\" (UniqueName: \"kubernetes.io/projected/d5a62427-4986-40d5-a842-9ac48dbd0a21-kube-api-access-g4hss\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.538101 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/d5a62427-4986-40d5-a842-9ac48dbd0a21-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.538112 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/88976074-47b8-4a90-9509-d2056016dcaa-logs\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.538123 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8clm\" (UniqueName: \"kubernetes.io/projected/88976074-47b8-4a90-9509-d2056016dcaa-kube-api-access-l8clm\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.538132 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/88976074-47b8-4a90-9509-d2056016dcaa-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.538140 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.538149 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.538158 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc 
kubenswrapper[4941]: I1130 08:16:11.538166 4941 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d5a62427-4986-40d5-a842-9ac48dbd0a21-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.538176 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5a62427-4986-40d5-a842-9ac48dbd0a21-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:11 crc kubenswrapper[4941]: I1130 08:16:11.538185 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88976074-47b8-4a90-9509-d2056016dcaa-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.186998 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d5a62427-4986-40d5-a842-9ac48dbd0a21","Type":"ContainerDied","Data":"753641fb758bbbfeefa11a5666edb9a261721dcdd99944fe07de3240e702402e"} Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.187615 4941 scope.go:117] "RemoveContainer" containerID="565d0ba0f7bcd368e68e46139d3bb2d304a9ff85dc71366ee5c1e1dfa4c145f8" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.187033 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.190250 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"88976074-47b8-4a90-9509-d2056016dcaa","Type":"ContainerDied","Data":"017a4701932556c3fe363f1ebfe851ef86a7faa4512ae89e892846ec086cfe00"} Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.190360 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.227227 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.241287 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.257494 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.269400 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.282439 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:16:12 crc kubenswrapper[4941]: E1130 08:16:12.282962 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88976074-47b8-4a90-9509-d2056016dcaa" containerName="glance-log" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.282980 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="88976074-47b8-4a90-9509-d2056016dcaa" containerName="glance-log" Nov 30 08:16:12 crc kubenswrapper[4941]: E1130 08:16:12.283010 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5a62427-4986-40d5-a842-9ac48dbd0a21" containerName="glance-log" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.283016 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5a62427-4986-40d5-a842-9ac48dbd0a21" containerName="glance-log" Nov 30 08:16:12 crc kubenswrapper[4941]: E1130 08:16:12.283044 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88976074-47b8-4a90-9509-d2056016dcaa" containerName="glance-httpd" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.283050 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="88976074-47b8-4a90-9509-d2056016dcaa" containerName="glance-httpd" Nov 30 08:16:12 crc kubenswrapper[4941]: E1130 08:16:12.283066 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5a62427-4986-40d5-a842-9ac48dbd0a21" containerName="glance-httpd" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.283072 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5a62427-4986-40d5-a842-9ac48dbd0a21" containerName="glance-httpd" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.283236 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5a62427-4986-40d5-a842-9ac48dbd0a21" containerName="glance-httpd" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.283253 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="88976074-47b8-4a90-9509-d2056016dcaa" containerName="glance-log" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.283265 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5a62427-4986-40d5-a842-9ac48dbd0a21" containerName="glance-log" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.283279 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="88976074-47b8-4a90-9509-d2056016dcaa" containerName="glance-httpd" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.284304 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.291115 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.291300 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.291123 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-m5w9r" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.299480 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.314942 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.320792 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.324927 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.326729 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.359812 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/307bea18-b42b-4fb3-a880-90208b196d4c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.359981 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/307bea18-b42b-4fb3-a880-90208b196d4c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.360031 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a779aa82-744f-4b58-b795-8a04ae715a62-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.360063 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/307bea18-b42b-4fb3-a880-90208b196d4c-ceph\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.360116 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a779aa82-744f-4b58-b795-8a04ae715a62-ceph\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.360176 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a779aa82-744f-4b58-b795-8a04ae715a62-scripts\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.360218 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v88cq\" (UniqueName: \"kubernetes.io/projected/a779aa82-744f-4b58-b795-8a04ae715a62-kube-api-access-v88cq\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.360265 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjr5x\" (UniqueName: \"kubernetes.io/projected/307bea18-b42b-4fb3-a880-90208b196d4c-kube-api-access-qjr5x\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.360356 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a779aa82-744f-4b58-b795-8a04ae715a62-config-data\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.360431 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a779aa82-744f-4b58-b795-8a04ae715a62-logs\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.360511 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a779aa82-744f-4b58-b795-8a04ae715a62-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.360539 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/307bea18-b42b-4fb3-a880-90208b196d4c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.360562 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/307bea18-b42b-4fb3-a880-90208b196d4c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.360597 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/307bea18-b42b-4fb3-a880-90208b196d4c-logs\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc 
kubenswrapper[4941]: I1130 08:16:12.463124 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a779aa82-744f-4b58-b795-8a04ae715a62-logs\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463235 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a779aa82-744f-4b58-b795-8a04ae715a62-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463265 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/307bea18-b42b-4fb3-a880-90208b196d4c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463291 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/307bea18-b42b-4fb3-a880-90208b196d4c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463312 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/307bea18-b42b-4fb3-a880-90208b196d4c-logs\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463510 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/307bea18-b42b-4fb3-a880-90208b196d4c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463576 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/307bea18-b42b-4fb3-a880-90208b196d4c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463603 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a779aa82-744f-4b58-b795-8a04ae715a62-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463634 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/307bea18-b42b-4fb3-a880-90208b196d4c-ceph\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463675 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ceph\" (UniqueName: \"kubernetes.io/projected/a779aa82-744f-4b58-b795-8a04ae715a62-ceph\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463719 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a779aa82-744f-4b58-b795-8a04ae715a62-scripts\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463765 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v88cq\" (UniqueName: \"kubernetes.io/projected/a779aa82-744f-4b58-b795-8a04ae715a62-kube-api-access-v88cq\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463798 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjr5x\" (UniqueName: \"kubernetes.io/projected/307bea18-b42b-4fb3-a880-90208b196d4c-kube-api-access-qjr5x\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.463861 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a779aa82-744f-4b58-b795-8a04ae715a62-config-data\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.465895 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/307bea18-b42b-4fb3-a880-90208b196d4c-logs\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.466225 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/307bea18-b42b-4fb3-a880-90208b196d4c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.466689 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a779aa82-744f-4b58-b795-8a04ae715a62-logs\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.466780 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a779aa82-744f-4b58-b795-8a04ae715a62-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.470658 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/307bea18-b42b-4fb3-a880-90208b196d4c-scripts\") pod \"glance-default-internal-api-0\" (UID: 
\"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.470689 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a779aa82-744f-4b58-b795-8a04ae715a62-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.481342 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/a779aa82-744f-4b58-b795-8a04ae715a62-ceph\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.481524 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/307bea18-b42b-4fb3-a880-90208b196d4c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.481657 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a779aa82-744f-4b58-b795-8a04ae715a62-config-data\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.481834 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a779aa82-744f-4b58-b795-8a04ae715a62-scripts\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.481921 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/307bea18-b42b-4fb3-a880-90208b196d4c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.482437 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/307bea18-b42b-4fb3-a880-90208b196d4c-ceph\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.484737 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjr5x\" (UniqueName: \"kubernetes.io/projected/307bea18-b42b-4fb3-a880-90208b196d4c-kube-api-access-qjr5x\") pod \"glance-default-internal-api-0\" (UID: \"307bea18-b42b-4fb3-a880-90208b196d4c\") " pod="openstack/glance-default-internal-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.485743 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v88cq\" (UniqueName: \"kubernetes.io/projected/a779aa82-744f-4b58-b795-8a04ae715a62-kube-api-access-v88cq\") pod \"glance-default-external-api-0\" (UID: \"a779aa82-744f-4b58-b795-8a04ae715a62\") " pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 
08:16:12.623707 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 30 08:16:12 crc kubenswrapper[4941]: I1130 08:16:12.646922 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 30 08:16:13 crc kubenswrapper[4941]: I1130 08:16:13.537264 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88976074-47b8-4a90-9509-d2056016dcaa" path="/var/lib/kubelet/pods/88976074-47b8-4a90-9509-d2056016dcaa/volumes" Nov 30 08:16:13 crc kubenswrapper[4941]: I1130 08:16:13.538487 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5a62427-4986-40d5-a842-9ac48dbd0a21" path="/var/lib/kubelet/pods/d5a62427-4986-40d5-a842-9ac48dbd0a21/volumes" Nov 30 08:16:16 crc kubenswrapper[4941]: I1130 08:16:16.485989 4941 scope.go:117] "RemoveContainer" containerID="9d5134cc3c3a66dac5b1d8a9c588d774c803bba9d8bc23c51706ff1df64c0d6b" Nov 30 08:16:16 crc kubenswrapper[4941]: I1130 08:16:16.549630 4941 scope.go:117] "RemoveContainer" containerID="032392ac773741ecded3844a05c9ccffc22f4028abdc9447f16aa480b6be2dd5" Nov 30 08:16:16 crc kubenswrapper[4941]: I1130 08:16:16.694227 4941 scope.go:117] "RemoveContainer" containerID="23e89525e8ec1bbad26ebc0a81c674a22552a3370d24be1888065f2837044558" Nov 30 08:16:17 crc kubenswrapper[4941]: I1130 08:16:17.268447 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c89774c7-5gqsp" event={"ID":"9776f793-a9cc-4ca4-9dc2-85d258cde161","Type":"ContainerStarted","Data":"3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8"} Nov 30 08:16:17 crc kubenswrapper[4941]: I1130 08:16:17.274237 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-849bf75bb9-frr7k" event={"ID":"4fea21ec-32aa-4c9a-8794-9fe7ca69e135","Type":"ContainerStarted","Data":"5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b"} Nov 30 08:16:17 crc kubenswrapper[4941]: I1130 08:16:17.276958 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c8bc78fd7-bnkbz" event={"ID":"c29b9f4d-dde1-4a7d-93e7-1b66223aba61","Type":"ContainerStarted","Data":"f37b2b57ff53066afc62e5cd17e46d0f37076c07384c8bc8388ae70b9f2216e6"} Nov 30 08:16:17 crc kubenswrapper[4941]: I1130 08:16:17.354709 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 30 08:16:17 crc kubenswrapper[4941]: I1130 08:16:17.963048 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 30 08:16:17 crc kubenswrapper[4941]: W1130 08:16:17.965813 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod307bea18_b42b_4fb3_a880_90208b196d4c.slice/crio-36051c00c383185dddcc4237c411181648acd820f8535739a44b729f39bbf268 WatchSource:0}: Error finding container 36051c00c383185dddcc4237c411181648acd820f8535739a44b729f39bbf268: Status 404 returned error can't find the container with id 36051c00c383185dddcc4237c411181648acd820f8535739a44b729f39bbf268 Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.292403 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a779aa82-744f-4b58-b795-8a04ae715a62","Type":"ContainerStarted","Data":"fe356873628f77df9d316b756efa875e73a03e798e8d89dc420b7f685c5e5c4a"} Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.292452 4941 
Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.294256 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"307bea18-b42b-4fb3-a880-90208b196d4c","Type":"ContainerStarted","Data":"36051c00c383185dddcc4237c411181648acd820f8535739a44b729f39bbf268"}
Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.299615 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-849bf75bb9-frr7k" event={"ID":"4fea21ec-32aa-4c9a-8794-9fe7ca69e135","Type":"ContainerStarted","Data":"8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549"}
Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.303362 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c8bc78fd7-bnkbz" event={"ID":"c29b9f4d-dde1-4a7d-93e7-1b66223aba61","Type":"ContainerStarted","Data":"7133537b41cd56b96793f775afe54f5a08395031b92eed01c2834743b1268106"}
Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.303523 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7c8bc78fd7-bnkbz" podUID="c29b9f4d-dde1-4a7d-93e7-1b66223aba61" containerName="horizon-log" containerID="cri-o://f37b2b57ff53066afc62e5cd17e46d0f37076c07384c8bc8388ae70b9f2216e6" gracePeriod=30
Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.303600 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7c8bc78fd7-bnkbz" podUID="c29b9f4d-dde1-4a7d-93e7-1b66223aba61" containerName="horizon" containerID="cri-o://7133537b41cd56b96793f775afe54f5a08395031b92eed01c2834743b1268106" gracePeriod=30
Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.319120 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c89774c7-5gqsp" event={"ID":"9776f793-a9cc-4ca4-9dc2-85d258cde161","Type":"ContainerStarted","Data":"bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3"}
Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.326865 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-849bf75bb9-frr7k" podStartSLOduration=2.7699357669999998 podStartE2EDuration="10.326840116s" podCreationTimestamp="2025-11-30 08:16:08 +0000 UTC" firstStartedPulling="2025-11-30 08:16:09.163709363 +0000 UTC m=+5389.931880992" lastFinishedPulling="2025-11-30 08:16:16.720613712 +0000 UTC m=+5397.488785341" observedRunningTime="2025-11-30 08:16:18.318026663 +0000 UTC m=+5399.086198272" watchObservedRunningTime="2025-11-30 08:16:18.326840116 +0000 UTC m=+5399.095011725"
Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.346123 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7c8bc78fd7-bnkbz" podStartSLOduration=2.995799097 podStartE2EDuration="11.346102402s" podCreationTimestamp="2025-11-30 08:16:07 +0000 UTC" firstStartedPulling="2025-11-30 08:16:08.343577709 +0000 UTC m=+5389.111749318" lastFinishedPulling="2025-11-30 08:16:16.693880974 +0000 UTC m=+5397.462052623" observedRunningTime="2025-11-30 08:16:18.342615674 +0000 UTC m=+5399.110787283" watchObservedRunningTime="2025-11-30 08:16:18.346102402 +0000 UTC m=+5399.114274001"
Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.374065 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-c89774c7-5gqsp" podStartSLOduration=2.871331243 podStartE2EDuration="11.374043436s" podCreationTimestamp="2025-11-30 08:16:07 +0000 UTC" firstStartedPulling="2025-11-30 08:16:08.215437863 +0000 UTC m=+5388.983609472" lastFinishedPulling="2025-11-30 08:16:16.718150046 +0000 UTC m=+5397.486321665" observedRunningTime="2025-11-30 08:16:18.370498437 +0000 UTC m=+5399.138670046" watchObservedRunningTime="2025-11-30 08:16:18.374043436 +0000 UTC m=+5399.142215045"
Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.551376 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-849bf75bb9-frr7k"
Nov 30 08:16:18 crc kubenswrapper[4941]: I1130 08:16:18.551939 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-849bf75bb9-frr7k"
Nov 30 08:16:19 crc kubenswrapper[4941]: I1130 08:16:19.335284 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a779aa82-744f-4b58-b795-8a04ae715a62","Type":"ContainerStarted","Data":"81aa5a94618cd31579869f3434db5b4f672378b3ccf43b731403a5d9d4fe2448"}
Nov 30 08:16:19 crc kubenswrapper[4941]: I1130 08:16:19.341188 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"307bea18-b42b-4fb3-a880-90208b196d4c","Type":"ContainerStarted","Data":"ab252b63a8a8ab7c6f32e2564cb9fd74e09dd0408bb4b3e780e141239f24d6f3"}
Nov 30 08:16:19 crc kubenswrapper[4941]: I1130 08:16:19.369032 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.369008182 podStartE2EDuration="7.369008182s" podCreationTimestamp="2025-11-30 08:16:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:16:19.362931353 +0000 UTC m=+5400.131102972" watchObservedRunningTime="2025-11-30 08:16:19.369008182 +0000 UTC m=+5400.137179791"
Nov 30 08:16:19 crc kubenswrapper[4941]: I1130 08:16:19.406257 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.406233013 podStartE2EDuration="7.406233013s" podCreationTimestamp="2025-11-30 08:16:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:16:19.39225381 +0000 UTC m=+5400.160425419" watchObservedRunningTime="2025-11-30 08:16:19.406233013 +0000 UTC m=+5400.174404622"
Nov 30 08:16:20 crc kubenswrapper[4941]: I1130 08:16:20.357752 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"307bea18-b42b-4fb3-a880-90208b196d4c","Type":"ContainerStarted","Data":"d5a0432b95b7a5ee77716c3b7e2dd0af375b6b0bc89af8e5808e949e48dd5a06"}
Nov 30 08:16:22 crc kubenswrapper[4941]: I1130 08:16:22.522279 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f"
Nov 30 08:16:22 crc kubenswrapper[4941]: E1130 08:16:22.522867 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:16:22 crc kubenswrapper[4941]: I1130 08:16:22.625625 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 30 08:16:22 crc kubenswrapper[4941]: I1130 08:16:22.625693 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 30 08:16:22 crc kubenswrapper[4941]: I1130 08:16:22.647480 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 30 08:16:22 crc kubenswrapper[4941]: I1130 08:16:22.647546 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 30 08:16:22 crc kubenswrapper[4941]: I1130 08:16:22.670218 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 30 08:16:22 crc kubenswrapper[4941]: I1130 08:16:22.684247 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 30 08:16:22 crc kubenswrapper[4941]: I1130 08:16:22.702243 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 30 08:16:22 crc kubenswrapper[4941]: I1130 08:16:22.716608 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 30 08:16:23 crc kubenswrapper[4941]: I1130 08:16:23.389668 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 30 08:16:23 crc kubenswrapper[4941]: I1130 08:16:23.389739 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 30 08:16:23 crc kubenswrapper[4941]: I1130 08:16:23.389753 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 30 08:16:23 crc kubenswrapper[4941]: I1130 08:16:23.389768 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 30 08:16:25 crc kubenswrapper[4941]: I1130 08:16:25.866097 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 30 08:16:25 crc kubenswrapper[4941]: I1130 08:16:25.867123 4941 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 30 08:16:25 crc kubenswrapper[4941]: I1130 08:16:25.870128 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 30 08:16:25 crc kubenswrapper[4941]: I1130 08:16:25.898541 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 30 08:16:26 crc kubenswrapper[4941]: I1130 08:16:26.690432 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 30 08:16:27 crc kubenswrapper[4941]: I1130 08:16:27.689395 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:27 crc kubenswrapper[4941]: I1130 08:16:27.691130 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-c89774c7-5gqsp" Nov 30 08:16:27 crc 
Nov 30 08:16:27 crc kubenswrapper[4941]: I1130 08:16:27.806236 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7c8bc78fd7-bnkbz"
Nov 30 08:16:28 crc kubenswrapper[4941]: I1130 08:16:28.554427 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-849bf75bb9-frr7k" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.89:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.89:8080: connect: connection refused"
Nov 30 08:16:37 crc kubenswrapper[4941]: I1130 08:16:37.522604 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f"
Nov 30 08:16:37 crc kubenswrapper[4941]: E1130 08:16:37.526788 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:16:39 crc kubenswrapper[4941]: I1130 08:16:39.623945 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-c89774c7-5gqsp"
Nov 30 08:16:40 crc kubenswrapper[4941]: I1130 08:16:40.330078 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-849bf75bb9-frr7k"
Nov 30 08:16:41 crc kubenswrapper[4941]: I1130 08:16:41.547918 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-c89774c7-5gqsp"
Nov 30 08:16:42 crc kubenswrapper[4941]: I1130 08:16:42.093676 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-849bf75bb9-frr7k"
Nov 30 08:16:42 crc kubenswrapper[4941]: I1130 08:16:42.179436 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-c89774c7-5gqsp"]
Nov 30 08:16:42 crc kubenswrapper[4941]: I1130 08:16:42.179710 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-c89774c7-5gqsp" podUID="9776f793-a9cc-4ca4-9dc2-85d258cde161" containerName="horizon-log" containerID="cri-o://3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8" gracePeriod=30
Nov 30 08:16:42 crc kubenswrapper[4941]: I1130 08:16:42.180244 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-c89774c7-5gqsp" podUID="9776f793-a9cc-4ca4-9dc2-85d258cde161" containerName="horizon" containerID="cri-o://bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3" gracePeriod=30
Nov 30 08:16:45 crc kubenswrapper[4941]: I1130 08:16:45.724702 4941 generic.go:334] "Generic (PLEG): container finished" podID="9776f793-a9cc-4ca4-9dc2-85d258cde161" containerID="bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3" exitCode=0
Nov 30 08:16:45 crc kubenswrapper[4941]: I1130 08:16:45.725116 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c89774c7-5gqsp" event={"ID":"9776f793-a9cc-4ca4-9dc2-85d258cde161","Type":"ContainerDied","Data":"bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3"}
pod="openstack/horizon-c89774c7-5gqsp" event={"ID":"9776f793-a9cc-4ca4-9dc2-85d258cde161","Type":"ContainerDied","Data":"bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3"} Nov 30 08:16:47 crc kubenswrapper[4941]: I1130 08:16:47.690936 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-c89774c7-5gqsp" podUID="9776f793-a9cc-4ca4-9dc2-85d258cde161" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.87:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.87:8080: connect: connection refused" Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.765594 4941 generic.go:334] "Generic (PLEG): container finished" podID="c29b9f4d-dde1-4a7d-93e7-1b66223aba61" containerID="7133537b41cd56b96793f775afe54f5a08395031b92eed01c2834743b1268106" exitCode=137 Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.767103 4941 generic.go:334] "Generic (PLEG): container finished" podID="c29b9f4d-dde1-4a7d-93e7-1b66223aba61" containerID="f37b2b57ff53066afc62e5cd17e46d0f37076c07384c8bc8388ae70b9f2216e6" exitCode=137 Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.765665 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c8bc78fd7-bnkbz" event={"ID":"c29b9f4d-dde1-4a7d-93e7-1b66223aba61","Type":"ContainerDied","Data":"7133537b41cd56b96793f775afe54f5a08395031b92eed01c2834743b1268106"} Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.767164 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c8bc78fd7-bnkbz" event={"ID":"c29b9f4d-dde1-4a7d-93e7-1b66223aba61","Type":"ContainerDied","Data":"f37b2b57ff53066afc62e5cd17e46d0f37076c07384c8bc8388ae70b9f2216e6"} Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.767183 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7c8bc78fd7-bnkbz" event={"ID":"c29b9f4d-dde1-4a7d-93e7-1b66223aba61","Type":"ContainerDied","Data":"f3786ca6127af7bd9ebaa96a30bb0e1ff28ef8a68bd8c1619240fc5a40dce959"} Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.767195 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3786ca6127af7bd9ebaa96a30bb0e1ff28ef8a68bd8c1619240fc5a40dce959" Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.828741 4941 util.go:48] "No ready sandbox for pod can be found. 
Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.922788 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-horizon-secret-key\") pod \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") "
Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.923218 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-scripts\") pod \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") "
Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.923298 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8c5nj\" (UniqueName: \"kubernetes.io/projected/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-kube-api-access-8c5nj\") pod \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") "
Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.923418 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-logs\") pod \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") "
Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.923540 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-config-data\") pod \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\" (UID: \"c29b9f4d-dde1-4a7d-93e7-1b66223aba61\") "
Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.924071 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-logs" (OuterVolumeSpecName: "logs") pod "c29b9f4d-dde1-4a7d-93e7-1b66223aba61" (UID: "c29b9f4d-dde1-4a7d-93e7-1b66223aba61"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.925131 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-logs\") on node \"crc\" DevicePath \"\""
Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.930107 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-kube-api-access-8c5nj" (OuterVolumeSpecName: "kube-api-access-8c5nj") pod "c29b9f4d-dde1-4a7d-93e7-1b66223aba61" (UID: "c29b9f4d-dde1-4a7d-93e7-1b66223aba61"). InnerVolumeSpecName "kube-api-access-8c5nj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.949763 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "c29b9f4d-dde1-4a7d-93e7-1b66223aba61" (UID: "c29b9f4d-dde1-4a7d-93e7-1b66223aba61"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.953619 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-config-data" (OuterVolumeSpecName: "config-data") pod "c29b9f4d-dde1-4a7d-93e7-1b66223aba61" (UID: "c29b9f4d-dde1-4a7d-93e7-1b66223aba61"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:16:48 crc kubenswrapper[4941]: I1130 08:16:48.975451 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-scripts" (OuterVolumeSpecName: "scripts") pod "c29b9f4d-dde1-4a7d-93e7-1b66223aba61" (UID: "c29b9f4d-dde1-4a7d-93e7-1b66223aba61"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:16:49 crc kubenswrapper[4941]: I1130 08:16:49.027915 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:49 crc kubenswrapper[4941]: I1130 08:16:49.027975 4941 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:49 crc kubenswrapper[4941]: I1130 08:16:49.028002 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:49 crc kubenswrapper[4941]: I1130 08:16:49.028022 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8c5nj\" (UniqueName: \"kubernetes.io/projected/c29b9f4d-dde1-4a7d-93e7-1b66223aba61-kube-api-access-8c5nj\") on node \"crc\" DevicePath \"\"" Nov 30 08:16:49 crc kubenswrapper[4941]: I1130 08:16:49.778954 4941 util.go:48] "No ready sandbox for pod can be found. 
Nov 30 08:16:49 crc kubenswrapper[4941]: I1130 08:16:49.823884 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7c8bc78fd7-bnkbz"]
Nov 30 08:16:49 crc kubenswrapper[4941]: I1130 08:16:49.836049 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7c8bc78fd7-bnkbz"]
Nov 30 08:16:50 crc kubenswrapper[4941]: I1130 08:16:50.523003 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f"
Nov 30 08:16:50 crc kubenswrapper[4941]: E1130 08:16:50.523390 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:16:51 crc kubenswrapper[4941]: I1130 08:16:51.544645 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c29b9f4d-dde1-4a7d-93e7-1b66223aba61" path="/var/lib/kubelet/pods/c29b9f4d-dde1-4a7d-93e7-1b66223aba61/volumes"
Nov 30 08:16:57 crc kubenswrapper[4941]: I1130 08:16:57.690224 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-c89774c7-5gqsp" podUID="9776f793-a9cc-4ca4-9dc2-85d258cde161" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.87:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.87:8080: connect: connection refused"
Nov 30 08:17:05 crc kubenswrapper[4941]: I1130 08:17:05.522247 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f"
Nov 30 08:17:05 crc kubenswrapper[4941]: E1130 08:17:05.523658 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:17:07 crc kubenswrapper[4941]: I1130 08:17:07.690635 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-c89774c7-5gqsp" podUID="9776f793-a9cc-4ca4-9dc2-85d258cde161" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.87:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.87:8080: connect: connection refused"
Nov 30 08:17:07 crc kubenswrapper[4941]: I1130 08:17:07.691254 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-c89774c7-5gqsp"
Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.702899 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c89774c7-5gqsp"
Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.719279 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9776f793-a9cc-4ca4-9dc2-85d258cde161-scripts\") pod \"9776f793-a9cc-4ca4-9dc2-85d258cde161\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") "
Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.719473 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9776f793-a9cc-4ca4-9dc2-85d258cde161-config-data\") pod \"9776f793-a9cc-4ca4-9dc2-85d258cde161\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") "
Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.719516 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9776f793-a9cc-4ca4-9dc2-85d258cde161-logs\") pod \"9776f793-a9cc-4ca4-9dc2-85d258cde161\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") "
Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.719593 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9776f793-a9cc-4ca4-9dc2-85d258cde161-horizon-secret-key\") pod \"9776f793-a9cc-4ca4-9dc2-85d258cde161\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") "
Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.719639 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7q8sx\" (UniqueName: \"kubernetes.io/projected/9776f793-a9cc-4ca4-9dc2-85d258cde161-kube-api-access-7q8sx\") pod \"9776f793-a9cc-4ca4-9dc2-85d258cde161\" (UID: \"9776f793-a9cc-4ca4-9dc2-85d258cde161\") "
Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.720209 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9776f793-a9cc-4ca4-9dc2-85d258cde161-logs" (OuterVolumeSpecName: "logs") pod "9776f793-a9cc-4ca4-9dc2-85d258cde161" (UID: "9776f793-a9cc-4ca4-9dc2-85d258cde161"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.726363 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9776f793-a9cc-4ca4-9dc2-85d258cde161-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "9776f793-a9cc-4ca4-9dc2-85d258cde161" (UID: "9776f793-a9cc-4ca4-9dc2-85d258cde161"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.726891 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9776f793-a9cc-4ca4-9dc2-85d258cde161-kube-api-access-7q8sx" (OuterVolumeSpecName: "kube-api-access-7q8sx") pod "9776f793-a9cc-4ca4-9dc2-85d258cde161" (UID: "9776f793-a9cc-4ca4-9dc2-85d258cde161"). InnerVolumeSpecName "kube-api-access-7q8sx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.760376 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9776f793-a9cc-4ca4-9dc2-85d258cde161-scripts" (OuterVolumeSpecName: "scripts") pod "9776f793-a9cc-4ca4-9dc2-85d258cde161" (UID: "9776f793-a9cc-4ca4-9dc2-85d258cde161"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.765815 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9776f793-a9cc-4ca4-9dc2-85d258cde161-config-data" (OuterVolumeSpecName: "config-data") pod "9776f793-a9cc-4ca4-9dc2-85d258cde161" (UID: "9776f793-a9cc-4ca4-9dc2-85d258cde161"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.820805 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9776f793-a9cc-4ca4-9dc2-85d258cde161-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.821073 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9776f793-a9cc-4ca4-9dc2-85d258cde161-logs\") on node \"crc\" DevicePath \"\"" Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.821140 4941 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9776f793-a9cc-4ca4-9dc2-85d258cde161-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.821227 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7q8sx\" (UniqueName: \"kubernetes.io/projected/9776f793-a9cc-4ca4-9dc2-85d258cde161-kube-api-access-7q8sx\") on node \"crc\" DevicePath \"\"" Nov 30 08:17:12 crc kubenswrapper[4941]: I1130 08:17:12.821292 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9776f793-a9cc-4ca4-9dc2-85d258cde161-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.143467 4941 generic.go:334] "Generic (PLEG): container finished" podID="9776f793-a9cc-4ca4-9dc2-85d258cde161" containerID="3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8" exitCode=137 Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.143515 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c89774c7-5gqsp" event={"ID":"9776f793-a9cc-4ca4-9dc2-85d258cde161","Type":"ContainerDied","Data":"3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8"} Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.143543 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c89774c7-5gqsp" event={"ID":"9776f793-a9cc-4ca4-9dc2-85d258cde161","Type":"ContainerDied","Data":"44e10722fa9218168e01a3233873cd546041aaaa0a1500452f607749ee657f6c"} Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.143561 4941 scope.go:117] "RemoveContainer" containerID="bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3" Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.143635 4941 util.go:48] "No ready sandbox for pod can be found. 
Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.198688 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-c89774c7-5gqsp"]
Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.206763 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-c89774c7-5gqsp"]
Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.367455 4941 scope.go:117] "RemoveContainer" containerID="3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8"
Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.401708 4941 scope.go:117] "RemoveContainer" containerID="bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3"
Nov 30 08:17:13 crc kubenswrapper[4941]: E1130 08:17:13.402522 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3\": container with ID starting with bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3 not found: ID does not exist" containerID="bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3"
Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.402615 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3"} err="failed to get container status \"bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3\": rpc error: code = NotFound desc = could not find container \"bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3\": container with ID starting with bc8b9b0d8526676716667418dc517b8a72378dbbf228457545d0e0bf1fd26dd3 not found: ID does not exist"
Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.402690 4941 scope.go:117] "RemoveContainer" containerID="3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8"
Nov 30 08:17:13 crc kubenswrapper[4941]: E1130 08:17:13.403363 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8\": container with ID starting with 3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8 not found: ID does not exist" containerID="3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8"
Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.403412 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8"} err="failed to get container status \"3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8\": rpc error: code = NotFound desc = could not find container \"3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8\": container with ID starting with 3173f5680f9328d0ed0dbc6e862daabc8f6add8b09d39025a2988c436d0a24e8 not found: ID does not exist"
Nov 30 08:17:13 crc kubenswrapper[4941]: I1130 08:17:13.543798 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9776f793-a9cc-4ca4-9dc2-85d258cde161" path="/var/lib/kubelet/pods/9776f793-a9cc-4ca4-9dc2-85d258cde161/volumes"
Nov 30 08:17:19 crc kubenswrapper[4941]: I1130 08:17:19.055760 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-5d2d-account-create-update-kdc28"]
Nov 30 08:17:19 crc kubenswrapper[4941]: I1130 08:17:19.068391 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-zkgfv"]
DELETE" source="api" pods=["openstack/barbican-db-create-zkgfv"] Nov 30 08:17:19 crc kubenswrapper[4941]: I1130 08:17:19.080229 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-zkgfv"] Nov 30 08:17:19 crc kubenswrapper[4941]: I1130 08:17:19.089728 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-5d2d-account-create-update-kdc28"] Nov 30 08:17:19 crc kubenswrapper[4941]: I1130 08:17:19.541305 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49a676f8-dd5f-4912-b268-9ccc3b7fe108" path="/var/lib/kubelet/pods/49a676f8-dd5f-4912-b268-9ccc3b7fe108/volumes" Nov 30 08:17:19 crc kubenswrapper[4941]: I1130 08:17:19.542001 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e736e32c-0e4a-4150-adbc-3fe99e2f48c8" path="/var/lib/kubelet/pods/e736e32c-0e4a-4150-adbc-3fe99e2f48c8/volumes" Nov 30 08:17:20 crc kubenswrapper[4941]: I1130 08:17:20.523007 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:17:20 crc kubenswrapper[4941]: E1130 08:17:20.524814 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.357205 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5cb476c99f-sf99n"] Nov 30 08:17:25 crc kubenswrapper[4941]: E1130 08:17:25.358727 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9776f793-a9cc-4ca4-9dc2-85d258cde161" containerName="horizon-log" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.358750 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="9776f793-a9cc-4ca4-9dc2-85d258cde161" containerName="horizon-log" Nov 30 08:17:25 crc kubenswrapper[4941]: E1130 08:17:25.358763 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9776f793-a9cc-4ca4-9dc2-85d258cde161" containerName="horizon" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.358770 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="9776f793-a9cc-4ca4-9dc2-85d258cde161" containerName="horizon" Nov 30 08:17:25 crc kubenswrapper[4941]: E1130 08:17:25.358807 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c29b9f4d-dde1-4a7d-93e7-1b66223aba61" containerName="horizon-log" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.358816 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c29b9f4d-dde1-4a7d-93e7-1b66223aba61" containerName="horizon-log" Nov 30 08:17:25 crc kubenswrapper[4941]: E1130 08:17:25.358830 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c29b9f4d-dde1-4a7d-93e7-1b66223aba61" containerName="horizon" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.358836 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c29b9f4d-dde1-4a7d-93e7-1b66223aba61" containerName="horizon" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.359099 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="c29b9f4d-dde1-4a7d-93e7-1b66223aba61" containerName="horizon" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.359116 4941 
Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.359128 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="9776f793-a9cc-4ca4-9dc2-85d258cde161" containerName="horizon-log"
Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.359139 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="c29b9f4d-dde1-4a7d-93e7-1b66223aba61" containerName="horizon-log"
Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.360689 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5cb476c99f-sf99n"
Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.372392 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5cb476c99f-sf99n"]
Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.559483 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24e1159e-4632-4a9e-b573-c0625eaf4a93-scripts\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n"
Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.559649 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbh9x\" (UniqueName: \"kubernetes.io/projected/24e1159e-4632-4a9e-b573-c0625eaf4a93-kube-api-access-cbh9x\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n"
Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.559763 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24e1159e-4632-4a9e-b573-c0625eaf4a93-logs\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n"
Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.559968 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/24e1159e-4632-4a9e-b573-c0625eaf4a93-horizon-secret-key\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n"
Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.560025 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/24e1159e-4632-4a9e-b573-c0625eaf4a93-config-data\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n"
Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.663203 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/24e1159e-4632-4a9e-b573-c0625eaf4a93-horizon-secret-key\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n"
Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.663292 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/24e1159e-4632-4a9e-b573-c0625eaf4a93-config-data\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n"
\"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.663367 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24e1159e-4632-4a9e-b573-c0625eaf4a93-scripts\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.663446 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbh9x\" (UniqueName: \"kubernetes.io/projected/24e1159e-4632-4a9e-b573-c0625eaf4a93-kube-api-access-cbh9x\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.663545 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24e1159e-4632-4a9e-b573-c0625eaf4a93-logs\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.664389 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24e1159e-4632-4a9e-b573-c0625eaf4a93-logs\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.664544 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24e1159e-4632-4a9e-b573-c0625eaf4a93-scripts\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.665526 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/24e1159e-4632-4a9e-b573-c0625eaf4a93-config-data\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.680411 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/24e1159e-4632-4a9e-b573-c0625eaf4a93-horizon-secret-key\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.689753 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbh9x\" (UniqueName: \"kubernetes.io/projected/24e1159e-4632-4a9e-b573-c0625eaf4a93-kube-api-access-cbh9x\") pod \"horizon-5cb476c99f-sf99n\" (UID: \"24e1159e-4632-4a9e-b573-c0625eaf4a93\") " pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:25 crc kubenswrapper[4941]: I1130 08:17:25.984470 4941 util.go:30] "No sandbox for pod can be found. 
Nov 30 08:17:26 crc kubenswrapper[4941]: I1130 08:17:26.733418 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5cb476c99f-sf99n"]
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.364110 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cb476c99f-sf99n" event={"ID":"24e1159e-4632-4a9e-b573-c0625eaf4a93","Type":"ContainerStarted","Data":"827f8a3c94fde69938a205f04c23e2676c2a959a58a17bd4f2e917e5a1c5e763"}
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.364552 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cb476c99f-sf99n" event={"ID":"24e1159e-4632-4a9e-b573-c0625eaf4a93","Type":"ContainerStarted","Data":"f00cd9c79d971dbc3305910eaf91408b2d554b7e1f27defa92fbe4ce409587eb"}
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.364568 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cb476c99f-sf99n" event={"ID":"24e1159e-4632-4a9e-b573-c0625eaf4a93","Type":"ContainerStarted","Data":"7e69301d58ca35e5ff13300b09c5e2fe1a6857d97701837ad0296f3599fd9c61"}
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.401253 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5cb476c99f-sf99n" podStartSLOduration=2.401234162 podStartE2EDuration="2.401234162s" podCreationTimestamp="2025-11-30 08:17:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:17:27.399889039 +0000 UTC m=+5468.168060648" watchObservedRunningTime="2025-11-30 08:17:27.401234162 +0000 UTC m=+5468.169405771"
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.457300 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-g84q2"]
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.459129 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-g84q2"
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.466359 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-g84q2"]
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.572200 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cdae-account-create-update-n7gzv"]
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.575403 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cdae-account-create-update-n7gzv"
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.578574 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret"
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.586079 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cdae-account-create-update-n7gzv"]
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.621384 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whxvm\" (UniqueName: \"kubernetes.io/projected/d098d395-0a98-4ba6-97eb-8422ead8b53e-kube-api-access-whxvm\") pod \"heat-db-create-g84q2\" (UID: \"d098d395-0a98-4ba6-97eb-8422ead8b53e\") " pod="openstack/heat-db-create-g84q2"
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.621442 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d098d395-0a98-4ba6-97eb-8422ead8b53e-operator-scripts\") pod \"heat-db-create-g84q2\" (UID: \"d098d395-0a98-4ba6-97eb-8422ead8b53e\") " pod="openstack/heat-db-create-g84q2"
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.724463 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whxvm\" (UniqueName: \"kubernetes.io/projected/d098d395-0a98-4ba6-97eb-8422ead8b53e-kube-api-access-whxvm\") pod \"heat-db-create-g84q2\" (UID: \"d098d395-0a98-4ba6-97eb-8422ead8b53e\") " pod="openstack/heat-db-create-g84q2"
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.724537 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d098d395-0a98-4ba6-97eb-8422ead8b53e-operator-scripts\") pod \"heat-db-create-g84q2\" (UID: \"d098d395-0a98-4ba6-97eb-8422ead8b53e\") " pod="openstack/heat-db-create-g84q2"
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.724577 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f-operator-scripts\") pod \"heat-cdae-account-create-update-n7gzv\" (UID: \"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f\") " pod="openstack/heat-cdae-account-create-update-n7gzv"
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.724632 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbl5j\" (UniqueName: \"kubernetes.io/projected/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f-kube-api-access-cbl5j\") pod \"heat-cdae-account-create-update-n7gzv\" (UID: \"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f\") " pod="openstack/heat-cdae-account-create-update-n7gzv"
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.725756 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d098d395-0a98-4ba6-97eb-8422ead8b53e-operator-scripts\") pod \"heat-db-create-g84q2\" (UID: \"d098d395-0a98-4ba6-97eb-8422ead8b53e\") " pod="openstack/heat-db-create-g84q2"
Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.749914 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whxvm\" (UniqueName: \"kubernetes.io/projected/d098d395-0a98-4ba6-97eb-8422ead8b53e-kube-api-access-whxvm\") pod \"heat-db-create-g84q2\" (UID: \"d098d395-0a98-4ba6-97eb-8422ead8b53e\") " pod="openstack/heat-db-create-g84q2"
\"d098d395-0a98-4ba6-97eb-8422ead8b53e\") " pod="openstack/heat-db-create-g84q2" Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.827317 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f-operator-scripts\") pod \"heat-cdae-account-create-update-n7gzv\" (UID: \"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f\") " pod="openstack/heat-cdae-account-create-update-n7gzv" Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.827753 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbl5j\" (UniqueName: \"kubernetes.io/projected/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f-kube-api-access-cbl5j\") pod \"heat-cdae-account-create-update-n7gzv\" (UID: \"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f\") " pod="openstack/heat-cdae-account-create-update-n7gzv" Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.828134 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f-operator-scripts\") pod \"heat-cdae-account-create-update-n7gzv\" (UID: \"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f\") " pod="openstack/heat-cdae-account-create-update-n7gzv" Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.833671 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-g84q2" Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.846750 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbl5j\" (UniqueName: \"kubernetes.io/projected/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f-kube-api-access-cbl5j\") pod \"heat-cdae-account-create-update-n7gzv\" (UID: \"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f\") " pod="openstack/heat-cdae-account-create-update-n7gzv" Nov 30 08:17:27 crc kubenswrapper[4941]: I1130 08:17:27.895729 4941 util.go:30] "No sandbox for pod can be found. 
Nov 30 08:17:28 crc kubenswrapper[4941]: I1130 08:17:28.328194 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-g84q2"]
Nov 30 08:17:28 crc kubenswrapper[4941]: I1130 08:17:28.375244 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-g84q2" event={"ID":"d098d395-0a98-4ba6-97eb-8422ead8b53e","Type":"ContainerStarted","Data":"4f3654b83f4b036413c68d04c45a8ed3618f9df08a356e89b886dcdf4c116b67"}
Nov 30 08:17:28 crc kubenswrapper[4941]: W1130 08:17:28.495897 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7900951c_1fdb_49aa_b9ac_f5bc9f3edd7f.slice/crio-1fed8507ed481a01ef34833542e668a9feacf43cb9efae5005d0eb5b8f1a7955 WatchSource:0}: Error finding container 1fed8507ed481a01ef34833542e668a9feacf43cb9efae5005d0eb5b8f1a7955: Status 404 returned error can't find the container with id 1fed8507ed481a01ef34833542e668a9feacf43cb9efae5005d0eb5b8f1a7955
Nov 30 08:17:28 crc kubenswrapper[4941]: I1130 08:17:28.496617 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cdae-account-create-update-n7gzv"]
Nov 30 08:17:29 crc kubenswrapper[4941]: I1130 08:17:29.388983 4941 generic.go:334] "Generic (PLEG): container finished" podID="d098d395-0a98-4ba6-97eb-8422ead8b53e" containerID="71bde4bcdb52219de56f0629ce04d0747bec6a7055739c84cf8b409466d8c12f" exitCode=0
Nov 30 08:17:29 crc kubenswrapper[4941]: I1130 08:17:29.389101 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-g84q2" event={"ID":"d098d395-0a98-4ba6-97eb-8422ead8b53e","Type":"ContainerDied","Data":"71bde4bcdb52219de56f0629ce04d0747bec6a7055739c84cf8b409466d8c12f"}
Nov 30 08:17:29 crc kubenswrapper[4941]: I1130 08:17:29.391297 4941 generic.go:334] "Generic (PLEG): container finished" podID="7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f" containerID="3a24397659f2d5ca117389c006c669b840dca3c9f1351fdefc6ea3ef6ae9f2c8" exitCode=0
Nov 30 08:17:29 crc kubenswrapper[4941]: I1130 08:17:29.391354 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cdae-account-create-update-n7gzv" event={"ID":"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f","Type":"ContainerDied","Data":"3a24397659f2d5ca117389c006c669b840dca3c9f1351fdefc6ea3ef6ae9f2c8"}
Nov 30 08:17:29 crc kubenswrapper[4941]: I1130 08:17:29.391382 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cdae-account-create-update-n7gzv" event={"ID":"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f","Type":"ContainerStarted","Data":"1fed8507ed481a01ef34833542e668a9feacf43cb9efae5005d0eb5b8f1a7955"}
Nov 30 08:17:30 crc kubenswrapper[4941]: I1130 08:17:30.884112 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cdae-account-create-update-n7gzv"
Nov 30 08:17:30 crc kubenswrapper[4941]: I1130 08:17:30.890082 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-g84q2"
Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.012976 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f-operator-scripts\") pod \"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f\" (UID: \"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f\") "
Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.013102 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbl5j\" (UniqueName: \"kubernetes.io/projected/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f-kube-api-access-cbl5j\") pod \"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f\" (UID: \"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f\") "
Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.013143 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d098d395-0a98-4ba6-97eb-8422ead8b53e-operator-scripts\") pod \"d098d395-0a98-4ba6-97eb-8422ead8b53e\" (UID: \"d098d395-0a98-4ba6-97eb-8422ead8b53e\") "
Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.013390 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whxvm\" (UniqueName: \"kubernetes.io/projected/d098d395-0a98-4ba6-97eb-8422ead8b53e-kube-api-access-whxvm\") pod \"d098d395-0a98-4ba6-97eb-8422ead8b53e\" (UID: \"d098d395-0a98-4ba6-97eb-8422ead8b53e\") "
Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.014099 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d098d395-0a98-4ba6-97eb-8422ead8b53e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d098d395-0a98-4ba6-97eb-8422ead8b53e" (UID: "d098d395-0a98-4ba6-97eb-8422ead8b53e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.014162 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f" (UID: "7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.014708 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.014737 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d098d395-0a98-4ba6-97eb-8422ead8b53e-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.020304 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d098d395-0a98-4ba6-97eb-8422ead8b53e-kube-api-access-whxvm" (OuterVolumeSpecName: "kube-api-access-whxvm") pod "d098d395-0a98-4ba6-97eb-8422ead8b53e" (UID: "d098d395-0a98-4ba6-97eb-8422ead8b53e"). InnerVolumeSpecName "kube-api-access-whxvm". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.028457 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f-kube-api-access-cbl5j" (OuterVolumeSpecName: "kube-api-access-cbl5j") pod "7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f" (UID: "7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f"). InnerVolumeSpecName "kube-api-access-cbl5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.122580 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbl5j\" (UniqueName: \"kubernetes.io/projected/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f-kube-api-access-cbl5j\") on node \"crc\" DevicePath \"\"" Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.122644 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whxvm\" (UniqueName: \"kubernetes.io/projected/d098d395-0a98-4ba6-97eb-8422ead8b53e-kube-api-access-whxvm\") on node \"crc\" DevicePath \"\"" Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.425828 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cdae-account-create-update-n7gzv" event={"ID":"7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f","Type":"ContainerDied","Data":"1fed8507ed481a01ef34833542e668a9feacf43cb9efae5005d0eb5b8f1a7955"} Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.425908 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1fed8507ed481a01ef34833542e668a9feacf43cb9efae5005d0eb5b8f1a7955" Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.426046 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cdae-account-create-update-n7gzv" Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.444035 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-g84q2" event={"ID":"d098d395-0a98-4ba6-97eb-8422ead8b53e","Type":"ContainerDied","Data":"4f3654b83f4b036413c68d04c45a8ed3618f9df08a356e89b886dcdf4c116b67"} Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.444089 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f3654b83f4b036413c68d04c45a8ed3618f9df08a356e89b886dcdf4c116b67" Nov 30 08:17:31 crc kubenswrapper[4941]: I1130 08:17:31.444125 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-g84q2" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.043728 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-c8bnk"] Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.057427 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-c8bnk"] Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.522245 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:17:32 crc kubenswrapper[4941]: E1130 08:17:32.523151 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.689875 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-58z6d"] Nov 30 08:17:32 crc kubenswrapper[4941]: E1130 08:17:32.690614 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d098d395-0a98-4ba6-97eb-8422ead8b53e" containerName="mariadb-database-create" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.690640 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d098d395-0a98-4ba6-97eb-8422ead8b53e" containerName="mariadb-database-create" Nov 30 08:17:32 crc kubenswrapper[4941]: E1130 08:17:32.690675 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f" containerName="mariadb-account-create-update" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.690685 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f" containerName="mariadb-account-create-update" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.690937 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f" containerName="mariadb-account-create-update" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.690975 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d098d395-0a98-4ba6-97eb-8422ead8b53e" containerName="mariadb-database-create" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.691701 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-58z6d" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.702044 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-7xkhh" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.702655 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.721116 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-58z6d"] Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.778438 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-combined-ca-bundle\") pod \"heat-db-sync-58z6d\" (UID: \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\") " pod="openstack/heat-db-sync-58z6d" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.778527 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-config-data\") pod \"heat-db-sync-58z6d\" (UID: \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\") " pod="openstack/heat-db-sync-58z6d" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.779254 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc2vr\" (UniqueName: \"kubernetes.io/projected/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-kube-api-access-vc2vr\") pod \"heat-db-sync-58z6d\" (UID: \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\") " pod="openstack/heat-db-sync-58z6d" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.881301 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc2vr\" (UniqueName: \"kubernetes.io/projected/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-kube-api-access-vc2vr\") pod \"heat-db-sync-58z6d\" (UID: \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\") " pod="openstack/heat-db-sync-58z6d" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.881401 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-combined-ca-bundle\") pod \"heat-db-sync-58z6d\" (UID: \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\") " pod="openstack/heat-db-sync-58z6d" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.881434 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-config-data\") pod \"heat-db-sync-58z6d\" (UID: \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\") " pod="openstack/heat-db-sync-58z6d" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.886862 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-combined-ca-bundle\") pod \"heat-db-sync-58z6d\" (UID: \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\") " pod="openstack/heat-db-sync-58z6d" Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.897916 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-config-data\") pod \"heat-db-sync-58z6d\" (UID: \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\") " pod="openstack/heat-db-sync-58z6d" 
Nov 30 08:17:32 crc kubenswrapper[4941]: I1130 08:17:32.898829 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc2vr\" (UniqueName: \"kubernetes.io/projected/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-kube-api-access-vc2vr\") pod \"heat-db-sync-58z6d\" (UID: \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\") " pod="openstack/heat-db-sync-58z6d" Nov 30 08:17:33 crc kubenswrapper[4941]: I1130 08:17:33.088545 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-58z6d" Nov 30 08:17:33 crc kubenswrapper[4941]: I1130 08:17:33.548096 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b" path="/var/lib/kubelet/pods/0c1e7bf6-b46e-45ea-ab8d-7d2410b2283b/volumes" Nov 30 08:17:33 crc kubenswrapper[4941]: I1130 08:17:33.636428 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-58z6d"] Nov 30 08:17:34 crc kubenswrapper[4941]: I1130 08:17:34.508122 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-58z6d" event={"ID":"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d","Type":"ContainerStarted","Data":"06d9bf5146953adef4b32c3ef4923a38a2161e04248e6d37f8b3af0f9e092ba9"} Nov 30 08:17:35 crc kubenswrapper[4941]: I1130 08:17:35.984940 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:35 crc kubenswrapper[4941]: I1130 08:17:35.985839 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:43 crc kubenswrapper[4941]: I1130 08:17:43.631464 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-58z6d" event={"ID":"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d","Type":"ContainerStarted","Data":"b2659ddb03c0b4a8892ebea7de61ac31aba1fa4ef90aa953a37b961938d0b571"} Nov 30 08:17:43 crc kubenswrapper[4941]: I1130 08:17:43.660343 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-58z6d" podStartSLOduration=2.202847285 podStartE2EDuration="11.660315567s" podCreationTimestamp="2025-11-30 08:17:32 +0000 UTC" firstStartedPulling="2025-11-30 08:17:33.651390737 +0000 UTC m=+5474.419562346" lastFinishedPulling="2025-11-30 08:17:43.108859009 +0000 UTC m=+5483.877030628" observedRunningTime="2025-11-30 08:17:43.6468522 +0000 UTC m=+5484.415023829" watchObservedRunningTime="2025-11-30 08:17:43.660315567 +0000 UTC m=+5484.428487176" Nov 30 08:17:45 crc kubenswrapper[4941]: I1130 08:17:45.522712 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:17:45 crc kubenswrapper[4941]: E1130 08:17:45.523651 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:17:45 crc kubenswrapper[4941]: I1130 08:17:45.604738 4941 scope.go:117] "RemoveContainer" containerID="53d311d948f473d507fa1005f78d9b07e86af904cbd06cd7510418e732b8da6c" Nov 30 08:17:45 crc kubenswrapper[4941]: I1130 08:17:45.641970 4941 scope.go:117] "RemoveContainer" 
containerID="b0ff277bdfac8cdfb24dc35693381df167a2a08c0c0ddb9dde9ee8803e38cc43" Nov 30 08:17:45 crc kubenswrapper[4941]: I1130 08:17:45.655001 4941 generic.go:334] "Generic (PLEG): container finished" podID="30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d" containerID="b2659ddb03c0b4a8892ebea7de61ac31aba1fa4ef90aa953a37b961938d0b571" exitCode=0 Nov 30 08:17:45 crc kubenswrapper[4941]: I1130 08:17:45.655088 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-58z6d" event={"ID":"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d","Type":"ContainerDied","Data":"b2659ddb03c0b4a8892ebea7de61ac31aba1fa4ef90aa953a37b961938d0b571"} Nov 30 08:17:45 crc kubenswrapper[4941]: I1130 08:17:45.692420 4941 scope.go:117] "RemoveContainer" containerID="8e29a923d30838597ce1faa16476d52c498fadd04a61db675f0dcf2db3807df0" Nov 30 08:17:45 crc kubenswrapper[4941]: I1130 08:17:45.721264 4941 scope.go:117] "RemoveContainer" containerID="17b8ba9ec1ec33645e56fc950b3c6b218cbddda3670a6763b3afe810c9b56def" Nov 30 08:17:45 crc kubenswrapper[4941]: I1130 08:17:45.811014 4941 scope.go:117] "RemoveContainer" containerID="fd590181835bab9c1c8b5945f946e8d413603d46fda3cb36441c24cdc4b6e040" Nov 30 08:17:45 crc kubenswrapper[4941]: I1130 08:17:45.847373 4941 scope.go:117] "RemoveContainer" containerID="b754f3768a2ef8eb69d109d9b16d1869a1662f042c556220f10e0889cbb1d743" Nov 30 08:17:45 crc kubenswrapper[4941]: I1130 08:17:45.874000 4941 scope.go:117] "RemoveContainer" containerID="75382cfed0f3e8ef6d3fc213cede13af9e7c3db980daaefeb9bbc7cb371f6935" Nov 30 08:17:45 crc kubenswrapper[4941]: I1130 08:17:45.986833 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5cb476c99f-sf99n" podUID="24e1159e-4632-4a9e-b573-c0625eaf4a93" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.92:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.92:8080: connect: connection refused" Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.091549 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-58z6d" Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.197011 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-config-data\") pod \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\" (UID: \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\") " Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.197186 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-combined-ca-bundle\") pod \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\" (UID: \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\") " Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.197239 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc2vr\" (UniqueName: \"kubernetes.io/projected/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-kube-api-access-vc2vr\") pod \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\" (UID: \"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d\") " Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.208578 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-kube-api-access-vc2vr" (OuterVolumeSpecName: "kube-api-access-vc2vr") pod "30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d" (UID: "30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d"). 
InnerVolumeSpecName "kube-api-access-vc2vr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.235862 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d" (UID: "30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.275492 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-config-data" (OuterVolumeSpecName: "config-data") pod "30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d" (UID: "30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.299360 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.299415 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.299432 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc2vr\" (UniqueName: \"kubernetes.io/projected/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d-kube-api-access-vc2vr\") on node \"crc\" DevicePath \"\"" Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.691361 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-58z6d" event={"ID":"30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d","Type":"ContainerDied","Data":"06d9bf5146953adef4b32c3ef4923a38a2161e04248e6d37f8b3af0f9e092ba9"} Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.691412 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06d9bf5146953adef4b32c3ef4923a38a2161e04248e6d37f8b3af0f9e092ba9" Nov 30 08:17:47 crc kubenswrapper[4941]: I1130 08:17:47.691480 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-58z6d" Nov 30 08:17:48 crc kubenswrapper[4941]: I1130 08:17:48.952084 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-6df4c5b7cd-hmfx6"] Nov 30 08:17:48 crc kubenswrapper[4941]: E1130 08:17:48.953154 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d" containerName="heat-db-sync" Nov 30 08:17:48 crc kubenswrapper[4941]: I1130 08:17:48.953174 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d" containerName="heat-db-sync" Nov 30 08:17:48 crc kubenswrapper[4941]: I1130 08:17:48.953422 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d" containerName="heat-db-sync" Nov 30 08:17:48 crc kubenswrapper[4941]: I1130 08:17:48.955394 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:48 crc kubenswrapper[4941]: I1130 08:17:48.958788 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Nov 30 08:17:48 crc kubenswrapper[4941]: I1130 08:17:48.959021 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Nov 30 08:17:48 crc kubenswrapper[4941]: I1130 08:17:48.959139 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-7xkhh" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.006870 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6df4c5b7cd-hmfx6"] Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.037095 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zjfr\" (UniqueName: \"kubernetes.io/projected/2080cb1e-6630-4e86-9bfd-61ce1d7490fc-kube-api-access-2zjfr\") pod \"heat-engine-6df4c5b7cd-hmfx6\" (UID: \"2080cb1e-6630-4e86-9bfd-61ce1d7490fc\") " pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.037261 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2080cb1e-6630-4e86-9bfd-61ce1d7490fc-config-data-custom\") pod \"heat-engine-6df4c5b7cd-hmfx6\" (UID: \"2080cb1e-6630-4e86-9bfd-61ce1d7490fc\") " pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.037297 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2080cb1e-6630-4e86-9bfd-61ce1d7490fc-config-data\") pod \"heat-engine-6df4c5b7cd-hmfx6\" (UID: \"2080cb1e-6630-4e86-9bfd-61ce1d7490fc\") " pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.037557 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2080cb1e-6630-4e86-9bfd-61ce1d7490fc-combined-ca-bundle\") pod \"heat-engine-6df4c5b7cd-hmfx6\" (UID: \"2080cb1e-6630-4e86-9bfd-61ce1d7490fc\") " pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.138506 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zjfr\" (UniqueName: \"kubernetes.io/projected/2080cb1e-6630-4e86-9bfd-61ce1d7490fc-kube-api-access-2zjfr\") pod \"heat-engine-6df4c5b7cd-hmfx6\" (UID: \"2080cb1e-6630-4e86-9bfd-61ce1d7490fc\") " pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.138633 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2080cb1e-6630-4e86-9bfd-61ce1d7490fc-config-data-custom\") pod \"heat-engine-6df4c5b7cd-hmfx6\" (UID: \"2080cb1e-6630-4e86-9bfd-61ce1d7490fc\") " pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.138687 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2080cb1e-6630-4e86-9bfd-61ce1d7490fc-config-data\") pod \"heat-engine-6df4c5b7cd-hmfx6\" (UID: \"2080cb1e-6630-4e86-9bfd-61ce1d7490fc\") " 
pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.138727 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2080cb1e-6630-4e86-9bfd-61ce1d7490fc-combined-ca-bundle\") pod \"heat-engine-6df4c5b7cd-hmfx6\" (UID: \"2080cb1e-6630-4e86-9bfd-61ce1d7490fc\") " pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.151682 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2080cb1e-6630-4e86-9bfd-61ce1d7490fc-combined-ca-bundle\") pod \"heat-engine-6df4c5b7cd-hmfx6\" (UID: \"2080cb1e-6630-4e86-9bfd-61ce1d7490fc\") " pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.155604 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2080cb1e-6630-4e86-9bfd-61ce1d7490fc-config-data\") pod \"heat-engine-6df4c5b7cd-hmfx6\" (UID: \"2080cb1e-6630-4e86-9bfd-61ce1d7490fc\") " pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.166311 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2080cb1e-6630-4e86-9bfd-61ce1d7490fc-config-data-custom\") pod \"heat-engine-6df4c5b7cd-hmfx6\" (UID: \"2080cb1e-6630-4e86-9bfd-61ce1d7490fc\") " pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.173108 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zjfr\" (UniqueName: \"kubernetes.io/projected/2080cb1e-6630-4e86-9bfd-61ce1d7490fc-kube-api-access-2zjfr\") pod \"heat-engine-6df4c5b7cd-hmfx6\" (UID: \"2080cb1e-6630-4e86-9bfd-61ce1d7490fc\") " pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.174396 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-5668d7bff6-xcb8s"] Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.176364 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.180564 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.198390 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-774d8c7f69-ws8bn"] Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.200237 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.203607 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.214900 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-5668d7bff6-xcb8s"] Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.239231 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-774d8c7f69-ws8bn"] Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.300177 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.342486 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae5e67e1-01bb-403d-ab7e-b21042b07f87-combined-ca-bundle\") pod \"heat-api-5668d7bff6-xcb8s\" (UID: \"ae5e67e1-01bb-403d-ab7e-b21042b07f87\") " pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.342562 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk655\" (UniqueName: \"kubernetes.io/projected/c9f0701b-867f-4c22-9d1b-e01a5644424b-kube-api-access-pk655\") pod \"heat-cfnapi-774d8c7f69-ws8bn\" (UID: \"c9f0701b-867f-4c22-9d1b-e01a5644424b\") " pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.342590 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9f0701b-867f-4c22-9d1b-e01a5644424b-combined-ca-bundle\") pod \"heat-cfnapi-774d8c7f69-ws8bn\" (UID: \"c9f0701b-867f-4c22-9d1b-e01a5644424b\") " pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.342617 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae5e67e1-01bb-403d-ab7e-b21042b07f87-config-data\") pod \"heat-api-5668d7bff6-xcb8s\" (UID: \"ae5e67e1-01bb-403d-ab7e-b21042b07f87\") " pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.342654 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c9f0701b-867f-4c22-9d1b-e01a5644424b-config-data-custom\") pod \"heat-cfnapi-774d8c7f69-ws8bn\" (UID: \"c9f0701b-867f-4c22-9d1b-e01a5644424b\") " pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.342723 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsqbx\" (UniqueName: \"kubernetes.io/projected/ae5e67e1-01bb-403d-ab7e-b21042b07f87-kube-api-access-zsqbx\") pod \"heat-api-5668d7bff6-xcb8s\" (UID: \"ae5e67e1-01bb-403d-ab7e-b21042b07f87\") " pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.342776 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae5e67e1-01bb-403d-ab7e-b21042b07f87-config-data-custom\") pod \"heat-api-5668d7bff6-xcb8s\" (UID: \"ae5e67e1-01bb-403d-ab7e-b21042b07f87\") " pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.342828 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9f0701b-867f-4c22-9d1b-e01a5644424b-config-data\") pod \"heat-cfnapi-774d8c7f69-ws8bn\" (UID: \"c9f0701b-867f-4c22-9d1b-e01a5644424b\") " pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.444526 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c9f0701b-867f-4c22-9d1b-e01a5644424b-config-data\") pod \"heat-cfnapi-774d8c7f69-ws8bn\" (UID: \"c9f0701b-867f-4c22-9d1b-e01a5644424b\") " pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.444626 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae5e67e1-01bb-403d-ab7e-b21042b07f87-combined-ca-bundle\") pod \"heat-api-5668d7bff6-xcb8s\" (UID: \"ae5e67e1-01bb-403d-ab7e-b21042b07f87\") " pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.444657 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk655\" (UniqueName: \"kubernetes.io/projected/c9f0701b-867f-4c22-9d1b-e01a5644424b-kube-api-access-pk655\") pod \"heat-cfnapi-774d8c7f69-ws8bn\" (UID: \"c9f0701b-867f-4c22-9d1b-e01a5644424b\") " pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.444688 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9f0701b-867f-4c22-9d1b-e01a5644424b-combined-ca-bundle\") pod \"heat-cfnapi-774d8c7f69-ws8bn\" (UID: \"c9f0701b-867f-4c22-9d1b-e01a5644424b\") " pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.444714 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae5e67e1-01bb-403d-ab7e-b21042b07f87-config-data\") pod \"heat-api-5668d7bff6-xcb8s\" (UID: \"ae5e67e1-01bb-403d-ab7e-b21042b07f87\") " pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.444754 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c9f0701b-867f-4c22-9d1b-e01a5644424b-config-data-custom\") pod \"heat-cfnapi-774d8c7f69-ws8bn\" (UID: \"c9f0701b-867f-4c22-9d1b-e01a5644424b\") " pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.444828 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsqbx\" (UniqueName: \"kubernetes.io/projected/ae5e67e1-01bb-403d-ab7e-b21042b07f87-kube-api-access-zsqbx\") pod \"heat-api-5668d7bff6-xcb8s\" (UID: \"ae5e67e1-01bb-403d-ab7e-b21042b07f87\") " pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.444883 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae5e67e1-01bb-403d-ab7e-b21042b07f87-config-data-custom\") pod \"heat-api-5668d7bff6-xcb8s\" (UID: \"ae5e67e1-01bb-403d-ab7e-b21042b07f87\") " pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.454279 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9f0701b-867f-4c22-9d1b-e01a5644424b-combined-ca-bundle\") pod \"heat-cfnapi-774d8c7f69-ws8bn\" (UID: \"c9f0701b-867f-4c22-9d1b-e01a5644424b\") " pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.457043 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/c9f0701b-867f-4c22-9d1b-e01a5644424b-config-data-custom\") pod \"heat-cfnapi-774d8c7f69-ws8bn\" (UID: \"c9f0701b-867f-4c22-9d1b-e01a5644424b\") " pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.458211 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae5e67e1-01bb-403d-ab7e-b21042b07f87-combined-ca-bundle\") pod \"heat-api-5668d7bff6-xcb8s\" (UID: \"ae5e67e1-01bb-403d-ab7e-b21042b07f87\") " pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.460671 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9f0701b-867f-4c22-9d1b-e01a5644424b-config-data\") pod \"heat-cfnapi-774d8c7f69-ws8bn\" (UID: \"c9f0701b-867f-4c22-9d1b-e01a5644424b\") " pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.461894 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ae5e67e1-01bb-403d-ab7e-b21042b07f87-config-data-custom\") pod \"heat-api-5668d7bff6-xcb8s\" (UID: \"ae5e67e1-01bb-403d-ab7e-b21042b07f87\") " pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.468058 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae5e67e1-01bb-403d-ab7e-b21042b07f87-config-data\") pod \"heat-api-5668d7bff6-xcb8s\" (UID: \"ae5e67e1-01bb-403d-ab7e-b21042b07f87\") " pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.471921 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsqbx\" (UniqueName: \"kubernetes.io/projected/ae5e67e1-01bb-403d-ab7e-b21042b07f87-kube-api-access-zsqbx\") pod \"heat-api-5668d7bff6-xcb8s\" (UID: \"ae5e67e1-01bb-403d-ab7e-b21042b07f87\") " pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.481385 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk655\" (UniqueName: \"kubernetes.io/projected/c9f0701b-867f-4c22-9d1b-e01a5644424b-kube-api-access-pk655\") pod \"heat-cfnapi-774d8c7f69-ws8bn\" (UID: \"c9f0701b-867f-4c22-9d1b-e01a5644424b\") " pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.558858 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.569162 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:49 crc kubenswrapper[4941]: I1130 08:17:49.883765 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6df4c5b7cd-hmfx6"] Nov 30 08:17:50 crc kubenswrapper[4941]: I1130 08:17:50.083004 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-5668d7bff6-xcb8s"] Nov 30 08:17:50 crc kubenswrapper[4941]: W1130 08:17:50.086350 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae5e67e1_01bb_403d_ab7e_b21042b07f87.slice/crio-a9682351c6458c04b67e20e021b417eca3c6b2117c0efc2bbff42669ddb191b6 WatchSource:0}: Error finding container a9682351c6458c04b67e20e021b417eca3c6b2117c0efc2bbff42669ddb191b6: Status 404 returned error can't find the container with id a9682351c6458c04b67e20e021b417eca3c6b2117c0efc2bbff42669ddb191b6 Nov 30 08:17:50 crc kubenswrapper[4941]: W1130 08:17:50.100531 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9f0701b_867f_4c22_9d1b_e01a5644424b.slice/crio-8a568f00920758c246a4f0b65c389bb5b65d7077e992fb5aeb0219edb31bb505 WatchSource:0}: Error finding container 8a568f00920758c246a4f0b65c389bb5b65d7077e992fb5aeb0219edb31bb505: Status 404 returned error can't find the container with id 8a568f00920758c246a4f0b65c389bb5b65d7077e992fb5aeb0219edb31bb505 Nov 30 08:17:50 crc kubenswrapper[4941]: I1130 08:17:50.102280 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-774d8c7f69-ws8bn"] Nov 30 08:17:50 crc kubenswrapper[4941]: I1130 08:17:50.738652 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6df4c5b7cd-hmfx6" event={"ID":"2080cb1e-6630-4e86-9bfd-61ce1d7490fc","Type":"ContainerStarted","Data":"baf4e63ae416ea6fddcd96dac444b87ed9b7864c22b70b7b414d9fa62e93bc39"} Nov 30 08:17:50 crc kubenswrapper[4941]: I1130 08:17:50.739097 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6df4c5b7cd-hmfx6" event={"ID":"2080cb1e-6630-4e86-9bfd-61ce1d7490fc","Type":"ContainerStarted","Data":"a1f64075236d6a65c7f220c25c00c36ce9c5482628bd576f67aaf1ad7336ca60"} Nov 30 08:17:50 crc kubenswrapper[4941]: I1130 08:17:50.741359 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:17:50 crc kubenswrapper[4941]: I1130 08:17:50.741969 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" event={"ID":"c9f0701b-867f-4c22-9d1b-e01a5644424b","Type":"ContainerStarted","Data":"8a568f00920758c246a4f0b65c389bb5b65d7077e992fb5aeb0219edb31bb505"} Nov 30 08:17:50 crc kubenswrapper[4941]: I1130 08:17:50.747096 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5668d7bff6-xcb8s" event={"ID":"ae5e67e1-01bb-403d-ab7e-b21042b07f87","Type":"ContainerStarted","Data":"a9682351c6458c04b67e20e021b417eca3c6b2117c0efc2bbff42669ddb191b6"} Nov 30 08:17:50 crc kubenswrapper[4941]: I1130 08:17:50.769528 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-6df4c5b7cd-hmfx6" podStartSLOduration=2.76950143 podStartE2EDuration="2.76950143s" podCreationTimestamp="2025-11-30 08:17:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:17:50.75950699 +0000 UTC 
m=+5491.527678599" watchObservedRunningTime="2025-11-30 08:17:50.76950143 +0000 UTC m=+5491.537673039" Nov 30 08:17:52 crc kubenswrapper[4941]: I1130 08:17:52.777099 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5668d7bff6-xcb8s" event={"ID":"ae5e67e1-01bb-403d-ab7e-b21042b07f87","Type":"ContainerStarted","Data":"191605882d33e21f853533603c00ee7b2f04bdc0877ffc7dacf8731f36cf5333"} Nov 30 08:17:52 crc kubenswrapper[4941]: I1130 08:17:52.777815 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:17:52 crc kubenswrapper[4941]: I1130 08:17:52.778638 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" event={"ID":"c9f0701b-867f-4c22-9d1b-e01a5644424b","Type":"ContainerStarted","Data":"5c0765cf7f46bd9e0c2d1c722795ad2c224fc03d15a063399629739af97477b0"} Nov 30 08:17:52 crc kubenswrapper[4941]: I1130 08:17:52.779022 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:17:52 crc kubenswrapper[4941]: I1130 08:17:52.800315 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-5668d7bff6-xcb8s" podStartSLOduration=2.185212255 podStartE2EDuration="3.800287813s" podCreationTimestamp="2025-11-30 08:17:49 +0000 UTC" firstStartedPulling="2025-11-30 08:17:50.088724889 +0000 UTC m=+5490.856896518" lastFinishedPulling="2025-11-30 08:17:51.703800467 +0000 UTC m=+5492.471972076" observedRunningTime="2025-11-30 08:17:52.794763812 +0000 UTC m=+5493.562935421" watchObservedRunningTime="2025-11-30 08:17:52.800287813 +0000 UTC m=+5493.568459422" Nov 30 08:17:57 crc kubenswrapper[4941]: I1130 08:17:57.941579 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:57 crc kubenswrapper[4941]: I1130 08:17:57.979831 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" podStartSLOduration=7.360807513 podStartE2EDuration="8.979810642s" podCreationTimestamp="2025-11-30 08:17:49 +0000 UTC" firstStartedPulling="2025-11-30 08:17:50.102507006 +0000 UTC m=+5490.870678615" lastFinishedPulling="2025-11-30 08:17:51.721510135 +0000 UTC m=+5492.489681744" observedRunningTime="2025-11-30 08:17:52.82666762 +0000 UTC m=+5493.594839229" watchObservedRunningTime="2025-11-30 08:17:57.979810642 +0000 UTC m=+5498.747982251" Nov 30 08:17:58 crc kubenswrapper[4941]: I1130 08:17:58.066558 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-szx5m"] Nov 30 08:17:58 crc kubenswrapper[4941]: I1130 08:17:58.092552 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-8389-account-create-update-m7724"] Nov 30 08:17:58 crc kubenswrapper[4941]: I1130 08:17:58.103145 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-8389-account-create-update-m7724"] Nov 30 08:17:58 crc kubenswrapper[4941]: I1130 08:17:58.117675 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-szx5m"] Nov 30 08:17:58 crc kubenswrapper[4941]: I1130 08:17:58.522101 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:17:58 crc kubenswrapper[4941]: E1130 08:17:58.522496 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:17:59 crc kubenswrapper[4941]: I1130 08:17:59.532760 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46540d03-cc01-4368-9097-a0db9616b3a9" path="/var/lib/kubelet/pods/46540d03-cc01-4368-9097-a0db9616b3a9/volumes" Nov 30 08:17:59 crc kubenswrapper[4941]: I1130 08:17:59.533757 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3232a4a-5afb-4b43-aa1d-80d03640444e" path="/var/lib/kubelet/pods/a3232a4a-5afb-4b43-aa1d-80d03640444e/volumes" Nov 30 08:17:59 crc kubenswrapper[4941]: I1130 08:17:59.812008 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5cb476c99f-sf99n" Nov 30 08:17:59 crc kubenswrapper[4941]: I1130 08:17:59.899563 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-849bf75bb9-frr7k"] Nov 30 08:17:59 crc kubenswrapper[4941]: I1130 08:17:59.900438 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-849bf75bb9-frr7k" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerName="horizon-log" containerID="cri-o://5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b" gracePeriod=30 Nov 30 08:17:59 crc kubenswrapper[4941]: I1130 08:17:59.900944 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-849bf75bb9-frr7k" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerName="horizon" containerID="cri-o://8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549" gracePeriod=30 Nov 30 08:18:01 crc kubenswrapper[4941]: I1130 08:18:01.016800 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-774d8c7f69-ws8bn" Nov 30 08:18:01 crc kubenswrapper[4941]: I1130 08:18:01.077703 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-5668d7bff6-xcb8s" Nov 30 08:18:03 crc kubenswrapper[4941]: I1130 08:18:03.905998 4941 generic.go:334] "Generic (PLEG): container finished" podID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerID="8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549" exitCode=0 Nov 30 08:18:03 crc kubenswrapper[4941]: I1130 08:18:03.906434 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-849bf75bb9-frr7k" event={"ID":"4fea21ec-32aa-4c9a-8794-9fe7ca69e135","Type":"ContainerDied","Data":"8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549"} Nov 30 08:18:08 crc kubenswrapper[4941]: I1130 08:18:08.055920 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-bwln7"] Nov 30 08:18:08 crc kubenswrapper[4941]: I1130 08:18:08.056648 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-bwln7"] Nov 30 08:18:08 crc kubenswrapper[4941]: I1130 08:18:08.551699 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-849bf75bb9-frr7k" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.89:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.89:8080: connect: connection refused" Nov 30 08:18:09 crc kubenswrapper[4941]: I1130 08:18:09.340870 4941 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-6df4c5b7cd-hmfx6" Nov 30 08:18:09 crc kubenswrapper[4941]: I1130 08:18:09.532169 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:18:09 crc kubenswrapper[4941]: E1130 08:18:09.532493 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:18:09 crc kubenswrapper[4941]: I1130 08:18:09.540019 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c74578e-3a37-4eb5-992a-5378b4d10be1" path="/var/lib/kubelet/pods/8c74578e-3a37-4eb5-992a-5378b4d10be1/volumes" Nov 30 08:18:18 crc kubenswrapper[4941]: I1130 08:18:18.551418 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-849bf75bb9-frr7k" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.89:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.89:8080: connect: connection refused" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.490117 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj"] Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.493435 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.496266 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.500985 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj"] Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.647477 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjcnv\" (UniqueName: \"kubernetes.io/projected/84659532-4d63-4199-a05a-7636f9a2f4d4-kube-api-access-vjcnv\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj\" (UID: \"84659532-4d63-4199-a05a-7636f9a2f4d4\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.647682 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/84659532-4d63-4199-a05a-7636f9a2f4d4-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj\" (UID: \"84659532-4d63-4199-a05a-7636f9a2f4d4\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.647738 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/84659532-4d63-4199-a05a-7636f9a2f4d4-bundle\") pod 
\"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj\" (UID: \"84659532-4d63-4199-a05a-7636f9a2f4d4\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.749658 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/84659532-4d63-4199-a05a-7636f9a2f4d4-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj\" (UID: \"84659532-4d63-4199-a05a-7636f9a2f4d4\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.749732 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/84659532-4d63-4199-a05a-7636f9a2f4d4-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj\" (UID: \"84659532-4d63-4199-a05a-7636f9a2f4d4\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.749887 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjcnv\" (UniqueName: \"kubernetes.io/projected/84659532-4d63-4199-a05a-7636f9a2f4d4-kube-api-access-vjcnv\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj\" (UID: \"84659532-4d63-4199-a05a-7636f9a2f4d4\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.750205 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/84659532-4d63-4199-a05a-7636f9a2f4d4-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj\" (UID: \"84659532-4d63-4199-a05a-7636f9a2f4d4\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.750496 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/84659532-4d63-4199-a05a-7636f9a2f4d4-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj\" (UID: \"84659532-4d63-4199-a05a-7636f9a2f4d4\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.783857 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjcnv\" (UniqueName: \"kubernetes.io/projected/84659532-4d63-4199-a05a-7636f9a2f4d4-kube-api-access-vjcnv\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj\" (UID: \"84659532-4d63-4199-a05a-7636f9a2f4d4\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.873087 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 30 08:18:19 crc kubenswrapper[4941]: I1130 08:18:19.881314 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:20 crc kubenswrapper[4941]: I1130 08:18:20.379637 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj"] Nov 30 08:18:20 crc kubenswrapper[4941]: W1130 08:18:20.387536 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84659532_4d63_4199_a05a_7636f9a2f4d4.slice/crio-aadb5074264f0911ecb692e8ff6c5ca758a43560c61761dc2b661160324610ea WatchSource:0}: Error finding container aadb5074264f0911ecb692e8ff6c5ca758a43560c61761dc2b661160324610ea: Status 404 returned error can't find the container with id aadb5074264f0911ecb692e8ff6c5ca758a43560c61761dc2b661160324610ea Nov 30 08:18:20 crc kubenswrapper[4941]: I1130 08:18:20.522238 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:18:20 crc kubenswrapper[4941]: E1130 08:18:20.522958 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:18:21 crc kubenswrapper[4941]: I1130 08:18:21.129545 4941 generic.go:334] "Generic (PLEG): container finished" podID="84659532-4d63-4199-a05a-7636f9a2f4d4" containerID="4bd6788eb93f0c2d70d18f750a1c3d8965db68c7479f33f023ffe42ace57985f" exitCode=0 Nov 30 08:18:21 crc kubenswrapper[4941]: I1130 08:18:21.129602 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" event={"ID":"84659532-4d63-4199-a05a-7636f9a2f4d4","Type":"ContainerDied","Data":"4bd6788eb93f0c2d70d18f750a1c3d8965db68c7479f33f023ffe42ace57985f"} Nov 30 08:18:21 crc kubenswrapper[4941]: I1130 08:18:21.129637 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" event={"ID":"84659532-4d63-4199-a05a-7636f9a2f4d4","Type":"ContainerStarted","Data":"aadb5074264f0911ecb692e8ff6c5ca758a43560c61761dc2b661160324610ea"} Nov 30 08:18:23 crc kubenswrapper[4941]: I1130 08:18:23.169143 4941 generic.go:334] "Generic (PLEG): container finished" podID="84659532-4d63-4199-a05a-7636f9a2f4d4" containerID="2bc6dee5162005c65a5fc94212e68ffc12998401e55e900fdab4ddf081e6a238" exitCode=0 Nov 30 08:18:23 crc kubenswrapper[4941]: I1130 08:18:23.169362 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" event={"ID":"84659532-4d63-4199-a05a-7636f9a2f4d4","Type":"ContainerDied","Data":"2bc6dee5162005c65a5fc94212e68ffc12998401e55e900fdab4ddf081e6a238"} Nov 30 08:18:24 crc kubenswrapper[4941]: I1130 08:18:24.192110 4941 generic.go:334] "Generic (PLEG): container finished" podID="84659532-4d63-4199-a05a-7636f9a2f4d4" containerID="4ee6435af60be6ccc5c9aa8a83cb66e0703b5b6ad9329d14ffbf7fb5ff53c69d" exitCode=0 Nov 30 08:18:24 crc kubenswrapper[4941]: I1130 08:18:24.192190 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" event={"ID":"84659532-4d63-4199-a05a-7636f9a2f4d4","Type":"ContainerDied","Data":"4ee6435af60be6ccc5c9aa8a83cb66e0703b5b6ad9329d14ffbf7fb5ff53c69d"} Nov 30 08:18:25 crc kubenswrapper[4941]: I1130 08:18:25.649077 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:25 crc kubenswrapper[4941]: I1130 08:18:25.810119 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/84659532-4d63-4199-a05a-7636f9a2f4d4-bundle\") pod \"84659532-4d63-4199-a05a-7636f9a2f4d4\" (UID: \"84659532-4d63-4199-a05a-7636f9a2f4d4\") " Nov 30 08:18:25 crc kubenswrapper[4941]: I1130 08:18:25.810259 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/84659532-4d63-4199-a05a-7636f9a2f4d4-util\") pod \"84659532-4d63-4199-a05a-7636f9a2f4d4\" (UID: \"84659532-4d63-4199-a05a-7636f9a2f4d4\") " Nov 30 08:18:25 crc kubenswrapper[4941]: I1130 08:18:25.810375 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjcnv\" (UniqueName: \"kubernetes.io/projected/84659532-4d63-4199-a05a-7636f9a2f4d4-kube-api-access-vjcnv\") pod \"84659532-4d63-4199-a05a-7636f9a2f4d4\" (UID: \"84659532-4d63-4199-a05a-7636f9a2f4d4\") " Nov 30 08:18:25 crc kubenswrapper[4941]: I1130 08:18:25.814068 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84659532-4d63-4199-a05a-7636f9a2f4d4-bundle" (OuterVolumeSpecName: "bundle") pod "84659532-4d63-4199-a05a-7636f9a2f4d4" (UID: "84659532-4d63-4199-a05a-7636f9a2f4d4"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:18:25 crc kubenswrapper[4941]: I1130 08:18:25.820417 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84659532-4d63-4199-a05a-7636f9a2f4d4-kube-api-access-vjcnv" (OuterVolumeSpecName: "kube-api-access-vjcnv") pod "84659532-4d63-4199-a05a-7636f9a2f4d4" (UID: "84659532-4d63-4199-a05a-7636f9a2f4d4"). InnerVolumeSpecName "kube-api-access-vjcnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:18:25 crc kubenswrapper[4941]: I1130 08:18:25.831062 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84659532-4d63-4199-a05a-7636f9a2f4d4-util" (OuterVolumeSpecName: "util") pod "84659532-4d63-4199-a05a-7636f9a2f4d4" (UID: "84659532-4d63-4199-a05a-7636f9a2f4d4"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:18:25 crc kubenswrapper[4941]: I1130 08:18:25.913315 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjcnv\" (UniqueName: \"kubernetes.io/projected/84659532-4d63-4199-a05a-7636f9a2f4d4-kube-api-access-vjcnv\") on node \"crc\" DevicePath \"\"" Nov 30 08:18:25 crc kubenswrapper[4941]: I1130 08:18:25.913382 4941 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/84659532-4d63-4199-a05a-7636f9a2f4d4-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:18:25 crc kubenswrapper[4941]: I1130 08:18:25.913397 4941 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/84659532-4d63-4199-a05a-7636f9a2f4d4-util\") on node \"crc\" DevicePath \"\"" Nov 30 08:18:26 crc kubenswrapper[4941]: I1130 08:18:26.219257 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" event={"ID":"84659532-4d63-4199-a05a-7636f9a2f4d4","Type":"ContainerDied","Data":"aadb5074264f0911ecb692e8ff6c5ca758a43560c61761dc2b661160324610ea"} Nov 30 08:18:26 crc kubenswrapper[4941]: I1130 08:18:26.219751 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aadb5074264f0911ecb692e8ff6c5ca758a43560c61761dc2b661160324610ea" Nov 30 08:18:26 crc kubenswrapper[4941]: I1130 08:18:26.219352 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj" Nov 30 08:18:28 crc kubenswrapper[4941]: I1130 08:18:28.551711 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-849bf75bb9-frr7k" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.89:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.89:8080: connect: connection refused" Nov 30 08:18:28 crc kubenswrapper[4941]: I1130 08:18:28.551928 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.283253 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.285314 4941 generic.go:334] "Generic (PLEG): container finished" podID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerID="5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b" exitCode=137 Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.285356 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-849bf75bb9-frr7k" event={"ID":"4fea21ec-32aa-4c9a-8794-9fe7ca69e135","Type":"ContainerDied","Data":"5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b"} Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.285395 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-849bf75bb9-frr7k" event={"ID":"4fea21ec-32aa-4c9a-8794-9fe7ca69e135","Type":"ContainerDied","Data":"6acb3f7da21175d54dff2030c6021e7baa394f7fe43c17e3c58f754fabcf419d"} Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.285431 4941 scope.go:117] "RemoveContainer" containerID="8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.438597 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-scripts\") pod \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.438710 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6tpq\" (UniqueName: \"kubernetes.io/projected/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-kube-api-access-c6tpq\") pod \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.438743 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-config-data\") pod \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.438920 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-horizon-secret-key\") pod \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.438971 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-logs\") pod \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\" (UID: \"4fea21ec-32aa-4c9a-8794-9fe7ca69e135\") " Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.439932 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-logs" (OuterVolumeSpecName: "logs") pod "4fea21ec-32aa-4c9a-8794-9fe7ca69e135" (UID: "4fea21ec-32aa-4c9a-8794-9fe7ca69e135"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.465158 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-kube-api-access-c6tpq" (OuterVolumeSpecName: "kube-api-access-c6tpq") pod "4fea21ec-32aa-4c9a-8794-9fe7ca69e135" (UID: "4fea21ec-32aa-4c9a-8794-9fe7ca69e135"). InnerVolumeSpecName "kube-api-access-c6tpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.468908 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "4fea21ec-32aa-4c9a-8794-9fe7ca69e135" (UID: "4fea21ec-32aa-4c9a-8794-9fe7ca69e135"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.475320 4941 scope.go:117] "RemoveContainer" containerID="5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.482694 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-config-data" (OuterVolumeSpecName: "config-data") pod "4fea21ec-32aa-4c9a-8794-9fe7ca69e135" (UID: "4fea21ec-32aa-4c9a-8794-9fe7ca69e135"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.488586 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-scripts" (OuterVolumeSpecName: "scripts") pod "4fea21ec-32aa-4c9a-8794-9fe7ca69e135" (UID: "4fea21ec-32aa-4c9a-8794-9fe7ca69e135"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.541627 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.541668 4941 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.541690 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-logs\") on node \"crc\" DevicePath \"\"" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.541701 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.541710 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6tpq\" (UniqueName: \"kubernetes.io/projected/4fea21ec-32aa-4c9a-8794-9fe7ca69e135-kube-api-access-c6tpq\") on node \"crc\" DevicePath \"\"" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.585011 4941 scope.go:117] "RemoveContainer" containerID="8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549" Nov 30 08:18:30 crc kubenswrapper[4941]: E1130 08:18:30.585652 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549\": container with ID starting with 8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549 not found: ID does not exist" containerID="8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.585693 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549"} err="failed to get container status \"8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549\": rpc error: code = NotFound desc = could not find container \"8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549\": container with ID starting with 8438a4d72c294ba18f474eaf413053d7dd2748854ca0f1f068ec1d341d116549 not found: ID does not exist" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.585722 4941 scope.go:117] "RemoveContainer" containerID="5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b" Nov 30 08:18:30 crc kubenswrapper[4941]: E1130 08:18:30.586118 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b\": container with ID starting with 5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b not found: ID does not exist" containerID="5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b" Nov 30 08:18:30 crc kubenswrapper[4941]: I1130 08:18:30.586180 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b"} err="failed to get container status 
\"5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b\": rpc error: code = NotFound desc = could not find container \"5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b\": container with ID starting with 5d09f0c0f38b98e8bf1dff7ec65aefe257973b177b56b36b6d72bf2078b98a3b not found: ID does not exist" Nov 30 08:18:31 crc kubenswrapper[4941]: I1130 08:18:31.308509 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-849bf75bb9-frr7k" Nov 30 08:18:31 crc kubenswrapper[4941]: I1130 08:18:31.388569 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-849bf75bb9-frr7k"] Nov 30 08:18:31 crc kubenswrapper[4941]: I1130 08:18:31.410877 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-849bf75bb9-frr7k"] Nov 30 08:18:31 crc kubenswrapper[4941]: I1130 08:18:31.539686 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" path="/var/lib/kubelet/pods/4fea21ec-32aa-4c9a-8794-9fe7ca69e135/volumes" Nov 30 08:18:31 crc kubenswrapper[4941]: E1130 08:18:31.584140 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fea21ec_32aa_4c9a_8794_9fe7ca69e135.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fea21ec_32aa_4c9a_8794_9fe7ca69e135.slice/crio-6acb3f7da21175d54dff2030c6021e7baa394f7fe43c17e3c58f754fabcf419d\": RecentStats: unable to find data in memory cache]" Nov 30 08:18:35 crc kubenswrapper[4941]: I1130 08:18:35.521853 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:18:35 crc kubenswrapper[4941]: E1130 08:18:35.522744 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.893029 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-9gmqg"] Nov 30 08:18:37 crc kubenswrapper[4941]: E1130 08:18:37.896733 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerName="horizon-log" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.896769 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerName="horizon-log" Nov 30 08:18:37 crc kubenswrapper[4941]: E1130 08:18:37.896785 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84659532-4d63-4199-a05a-7636f9a2f4d4" containerName="pull" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.896791 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="84659532-4d63-4199-a05a-7636f9a2f4d4" containerName="pull" Nov 30 08:18:37 crc kubenswrapper[4941]: E1130 08:18:37.896804 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerName="horizon" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.896810 4941 
state_mem.go:107] "Deleted CPUSet assignment" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerName="horizon" Nov 30 08:18:37 crc kubenswrapper[4941]: E1130 08:18:37.896823 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84659532-4d63-4199-a05a-7636f9a2f4d4" containerName="extract" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.896829 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="84659532-4d63-4199-a05a-7636f9a2f4d4" containerName="extract" Nov 30 08:18:37 crc kubenswrapper[4941]: E1130 08:18:37.896841 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84659532-4d63-4199-a05a-7636f9a2f4d4" containerName="util" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.896846 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="84659532-4d63-4199-a05a-7636f9a2f4d4" containerName="util" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.897046 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerName="horizon" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.897063 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="84659532-4d63-4199-a05a-7636f9a2f4d4" containerName="extract" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.897074 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fea21ec-32aa-4c9a-8794-9fe7ca69e135" containerName="horizon-log" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.897793 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-9gmqg" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.900058 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-5jjvz" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.900144 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.901949 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.927184 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-9gmqg"] Nov 30 08:18:37 crc kubenswrapper[4941]: I1130 08:18:37.997382 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sbfm\" (UniqueName: \"kubernetes.io/projected/3514c902-e0b0-4f3e-a885-5f2c84daf49c-kube-api-access-4sbfm\") pod \"obo-prometheus-operator-668cf9dfbb-9gmqg\" (UID: \"3514c902-e0b0-4f3e-a885-5f2c84daf49c\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-9gmqg" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.037663 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt"] Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.039087 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.043629 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-pqjmz" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.043675 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.060164 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt"] Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.076288 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd"] Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.077844 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.099386 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sbfm\" (UniqueName: \"kubernetes.io/projected/3514c902-e0b0-4f3e-a885-5f2c84daf49c-kube-api-access-4sbfm\") pod \"obo-prometheus-operator-668cf9dfbb-9gmqg\" (UID: \"3514c902-e0b0-4f3e-a885-5f2c84daf49c\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-9gmqg" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.111973 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd"] Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.131299 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sbfm\" (UniqueName: \"kubernetes.io/projected/3514c902-e0b0-4f3e-a885-5f2c84daf49c-kube-api-access-4sbfm\") pod \"obo-prometheus-operator-668cf9dfbb-9gmqg\" (UID: \"3514c902-e0b0-4f3e-a885-5f2c84daf49c\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-9gmqg" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.201275 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/240c2020-021e-4c67-ba69-51ed7c6fb5a4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt\" (UID: \"240c2020-021e-4c67-ba69-51ed7c6fb5a4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.201379 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/240c2020-021e-4c67-ba69-51ed7c6fb5a4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt\" (UID: \"240c2020-021e-4c67-ba69-51ed7c6fb5a4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.201430 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/056ca204-09f9-4e74-b66f-f7b59c87d535-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd\" (UID: 
\"056ca204-09f9-4e74-b66f-f7b59c87d535\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.201472 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/056ca204-09f9-4e74-b66f-f7b59c87d535-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd\" (UID: \"056ca204-09f9-4e74-b66f-f7b59c87d535\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.221968 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-9gmqg" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.256355 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-9sjwb"] Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.257752 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.263762 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.264242 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-dg4ts" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.273889 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-9sjwb"] Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.302982 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/240c2020-021e-4c67-ba69-51ed7c6fb5a4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt\" (UID: \"240c2020-021e-4c67-ba69-51ed7c6fb5a4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.303058 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/240c2020-021e-4c67-ba69-51ed7c6fb5a4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt\" (UID: \"240c2020-021e-4c67-ba69-51ed7c6fb5a4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.303101 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/056ca204-09f9-4e74-b66f-f7b59c87d535-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd\" (UID: \"056ca204-09f9-4e74-b66f-f7b59c87d535\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.303140 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/056ca204-09f9-4e74-b66f-f7b59c87d535-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd\" (UID: \"056ca204-09f9-4e74-b66f-f7b59c87d535\") " 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.309620 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/056ca204-09f9-4e74-b66f-f7b59c87d535-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd\" (UID: \"056ca204-09f9-4e74-b66f-f7b59c87d535\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.317888 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/240c2020-021e-4c67-ba69-51ed7c6fb5a4-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt\" (UID: \"240c2020-021e-4c67-ba69-51ed7c6fb5a4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.326875 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/056ca204-09f9-4e74-b66f-f7b59c87d535-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd\" (UID: \"056ca204-09f9-4e74-b66f-f7b59c87d535\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.329019 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/240c2020-021e-4c67-ba69-51ed7c6fb5a4-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt\" (UID: \"240c2020-021e-4c67-ba69-51ed7c6fb5a4\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.359833 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.404850 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/11d96cee-acf1-4ada-ae48-e1888bb96c96-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-9sjwb\" (UID: \"11d96cee-acf1-4ada-ae48-e1888bb96c96\") " pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.405320 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5f8rf\" (UniqueName: \"kubernetes.io/projected/11d96cee-acf1-4ada-ae48-e1888bb96c96-kube-api-access-5f8rf\") pod \"observability-operator-d8bb48f5d-9sjwb\" (UID: \"11d96cee-acf1-4ada-ae48-e1888bb96c96\") " pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.409886 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.468121 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-krb82"] Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.470136 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-krb82" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.474279 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-c6b4s" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.501086 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-krb82"] Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.509612 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/11d96cee-acf1-4ada-ae48-e1888bb96c96-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-9sjwb\" (UID: \"11d96cee-acf1-4ada-ae48-e1888bb96c96\") " pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.510116 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5f8rf\" (UniqueName: \"kubernetes.io/projected/11d96cee-acf1-4ada-ae48-e1888bb96c96-kube-api-access-5f8rf\") pod \"observability-operator-d8bb48f5d-9sjwb\" (UID: \"11d96cee-acf1-4ada-ae48-e1888bb96c96\") " pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.517395 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/11d96cee-acf1-4ada-ae48-e1888bb96c96-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-9sjwb\" (UID: \"11d96cee-acf1-4ada-ae48-e1888bb96c96\") " pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.528685 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5f8rf\" (UniqueName: \"kubernetes.io/projected/11d96cee-acf1-4ada-ae48-e1888bb96c96-kube-api-access-5f8rf\") pod \"observability-operator-d8bb48f5d-9sjwb\" (UID: \"11d96cee-acf1-4ada-ae48-e1888bb96c96\") " pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.611787 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t99l4\" (UniqueName: \"kubernetes.io/projected/aaea3a7d-8b16-4e12-a9d3-5653861349b0-kube-api-access-t99l4\") pod \"perses-operator-5446b9c989-krb82\" (UID: \"aaea3a7d-8b16-4e12-a9d3-5653861349b0\") " pod="openshift-operators/perses-operator-5446b9c989-krb82" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.612031 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/aaea3a7d-8b16-4e12-a9d3-5653861349b0-openshift-service-ca\") pod \"perses-operator-5446b9c989-krb82\" (UID: \"aaea3a7d-8b16-4e12-a9d3-5653861349b0\") " pod="openshift-operators/perses-operator-5446b9c989-krb82" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.714687 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/aaea3a7d-8b16-4e12-a9d3-5653861349b0-openshift-service-ca\") pod \"perses-operator-5446b9c989-krb82\" (UID: \"aaea3a7d-8b16-4e12-a9d3-5653861349b0\") " pod="openshift-operators/perses-operator-5446b9c989-krb82" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.714814 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t99l4\" (UniqueName: \"kubernetes.io/projected/aaea3a7d-8b16-4e12-a9d3-5653861349b0-kube-api-access-t99l4\") pod \"perses-operator-5446b9c989-krb82\" (UID: \"aaea3a7d-8b16-4e12-a9d3-5653861349b0\") " pod="openshift-operators/perses-operator-5446b9c989-krb82" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.716615 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/aaea3a7d-8b16-4e12-a9d3-5653861349b0-openshift-service-ca\") pod \"perses-operator-5446b9c989-krb82\" (UID: \"aaea3a7d-8b16-4e12-a9d3-5653861349b0\") " pod="openshift-operators/perses-operator-5446b9c989-krb82" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.741475 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t99l4\" (UniqueName: \"kubernetes.io/projected/aaea3a7d-8b16-4e12-a9d3-5653861349b0-kube-api-access-t99l4\") pod \"perses-operator-5446b9c989-krb82\" (UID: \"aaea3a7d-8b16-4e12-a9d3-5653861349b0\") " pod="openshift-operators/perses-operator-5446b9c989-krb82" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.761470 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.810959 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-krb82" Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.900446 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt"] Nov 30 08:18:38 crc kubenswrapper[4941]: I1130 08:18:38.939113 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-9gmqg"] Nov 30 08:18:39 crc kubenswrapper[4941]: I1130 08:18:39.088482 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd"] Nov 30 08:18:39 crc kubenswrapper[4941]: W1130 08:18:39.128160 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod056ca204_09f9_4e74_b66f_f7b59c87d535.slice/crio-17f88b02f2050d40f0aa590fa5bc5dd1b2e0a74aefc779d8cf661c73096cb016 WatchSource:0}: Error finding container 17f88b02f2050d40f0aa590fa5bc5dd1b2e0a74aefc779d8cf661c73096cb016: Status 404 returned error can't find the container with id 17f88b02f2050d40f0aa590fa5bc5dd1b2e0a74aefc779d8cf661c73096cb016 Nov 30 08:18:39 crc kubenswrapper[4941]: I1130 08:18:39.396138 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt" event={"ID":"240c2020-021e-4c67-ba69-51ed7c6fb5a4","Type":"ContainerStarted","Data":"b86bff4177d3aa1de9bcafe327f8377decd711db6159926c9a08648583a5a963"} Nov 30 08:18:39 crc kubenswrapper[4941]: I1130 08:18:39.398516 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd" event={"ID":"056ca204-09f9-4e74-b66f-f7b59c87d535","Type":"ContainerStarted","Data":"17f88b02f2050d40f0aa590fa5bc5dd1b2e0a74aefc779d8cf661c73096cb016"} Nov 30 08:18:39 crc kubenswrapper[4941]: I1130 08:18:39.399948 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-9gmqg" event={"ID":"3514c902-e0b0-4f3e-a885-5f2c84daf49c","Type":"ContainerStarted","Data":"5655c161b67c7c26bce8227382c38fe5621255ae3c30b6c2acead655ad927d45"} Nov 30 08:18:39 crc kubenswrapper[4941]: I1130 08:18:39.551407 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-9sjwb"] Nov 30 08:18:39 crc kubenswrapper[4941]: I1130 08:18:39.641542 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-krb82"] Nov 30 08:18:40 crc kubenswrapper[4941]: I1130 08:18:40.415954 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-krb82" event={"ID":"aaea3a7d-8b16-4e12-a9d3-5653861349b0","Type":"ContainerStarted","Data":"0f3a5a74521ddd26792514ed0936a8b03e524d08738a83e5266bc70925fa528f"} Nov 30 08:18:40 crc kubenswrapper[4941]: I1130 08:18:40.435304 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb" event={"ID":"11d96cee-acf1-4ada-ae48-e1888bb96c96","Type":"ContainerStarted","Data":"749592e34086ec3f90a7cd92d1f2695a65c84724fbcc4d77a4dcc506b26b44ce"} Nov 30 08:18:46 crc kubenswrapper[4941]: I1130 08:18:46.042932 4941 scope.go:117] "RemoveContainer" containerID="d1461aa04a4331e5ef7c4b92e13b15edc48d62ad8f50f72983515624c9dedecc" Nov 30 08:18:48 crc kubenswrapper[4941]: I1130 08:18:48.038156 4941 scope.go:117] "RemoveContainer" containerID="a9b146867e7abd5c9f5b366a6d5f68317337d409e73e56c52270e501ae1475af" Nov 30 08:18:48 crc kubenswrapper[4941]: I1130 08:18:48.106435 4941 scope.go:117] "RemoveContainer" containerID="b21747adecca951148c0b6fdb98d07314f18f3930075899b1d6df1c3a8bb2677" Nov 30 08:18:48 crc kubenswrapper[4941]: I1130 08:18:48.158271 4941 scope.go:117] "RemoveContainer" containerID="de008258045eea3358e7c0869723b3d8d17871af9c03ebc72d0681a9bbfabdac" Nov 30 08:18:48 crc kubenswrapper[4941]: I1130 08:18:48.536920 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt" event={"ID":"240c2020-021e-4c67-ba69-51ed7c6fb5a4","Type":"ContainerStarted","Data":"e69d8c25240f1229a9a86f19fc53291a53f8ac0c22461443379196d2f97169d0"} Nov 30 08:18:48 crc kubenswrapper[4941]: I1130 08:18:48.607073 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt" podStartSLOduration=1.53570171 podStartE2EDuration="10.607046821s" podCreationTimestamp="2025-11-30 08:18:38 +0000 UTC" firstStartedPulling="2025-11-30 08:18:38.966995078 +0000 UTC m=+5539.735166687" lastFinishedPulling="2025-11-30 08:18:48.038340189 +0000 UTC m=+5548.806511798" observedRunningTime="2025-11-30 08:18:48.592905583 +0000 UTC m=+5549.361077192" watchObservedRunningTime="2025-11-30 08:18:48.607046821 +0000 UTC m=+5549.375218430" Nov 30 08:18:49 crc kubenswrapper[4941]: I1130 08:18:49.042869 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-b628k"] Nov 30 08:18:49 crc kubenswrapper[4941]: I1130 08:18:49.053557 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-b628k"] Nov 30 08:18:49 crc kubenswrapper[4941]: I1130 08:18:49.532638 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ab98dc4-33db-4ca6-8393-b3d2454de757" path="/var/lib/kubelet/pods/1ab98dc4-33db-4ca6-8393-b3d2454de757/volumes" Nov 30 
08:18:49 crc kubenswrapper[4941]: I1130 08:18:49.547289 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-9gmqg" event={"ID":"3514c902-e0b0-4f3e-a885-5f2c84daf49c","Type":"ContainerStarted","Data":"197177f89672b8ca55528b8d2f0c670a79e66df91b0abd52a59595511cd54829"}
Nov 30 08:18:49 crc kubenswrapper[4941]: I1130 08:18:49.549700 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd" event={"ID":"056ca204-09f9-4e74-b66f-f7b59c87d535","Type":"ContainerStarted","Data":"9cc7705008124e1d88327759df77b8e5b1c7c62832cc58acb3ad0ede1d0af37e"}
Nov 30 08:18:49 crc kubenswrapper[4941]: I1130 08:18:49.552045 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-krb82" event={"ID":"aaea3a7d-8b16-4e12-a9d3-5653861349b0","Type":"ContainerStarted","Data":"fba86ae858b8234ec4b1105cc5ae6e2721dd75de0c377268380331ba1c8adb41"}
Nov 30 08:18:49 crc kubenswrapper[4941]: I1130 08:18:49.552173 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-krb82"
Nov 30 08:18:49 crc kubenswrapper[4941]: I1130 08:18:49.554057 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb" event={"ID":"11d96cee-acf1-4ada-ae48-e1888bb96c96","Type":"ContainerStarted","Data":"e19d4197e03081f6d9188cf68ac04eb0405ea6b455e4605b258f9373d6bec599"}
Nov 30 08:18:49 crc kubenswrapper[4941]: I1130 08:18:49.606294 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb" podStartSLOduration=2.980029131 podStartE2EDuration="11.606271167s" podCreationTimestamp="2025-11-30 08:18:38 +0000 UTC" firstStartedPulling="2025-11-30 08:18:39.540053394 +0000 UTC m=+5540.308225003" lastFinishedPulling="2025-11-30 08:18:48.16629543 +0000 UTC m=+5548.934467039" observedRunningTime="2025-11-30 08:18:49.6021537 +0000 UTC m=+5550.370325309" watchObservedRunningTime="2025-11-30 08:18:49.606271167 +0000 UTC m=+5550.374442776"
Nov 30 08:18:49 crc kubenswrapper[4941]: I1130 08:18:49.635690 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-krb82" podStartSLOduration=3.199155595 podStartE2EDuration="11.635673878s" podCreationTimestamp="2025-11-30 08:18:38 +0000 UTC" firstStartedPulling="2025-11-30 08:18:39.672123352 +0000 UTC m=+5540.440294961" lastFinishedPulling="2025-11-30 08:18:48.108641635 +0000 UTC m=+5548.876813244" observedRunningTime="2025-11-30 08:18:49.63156174 +0000 UTC m=+5550.399733369" watchObservedRunningTime="2025-11-30 08:18:49.635673878 +0000 UTC m=+5550.403845487"
Nov 30 08:18:49 crc kubenswrapper[4941]: I1130 08:18:49.652877 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-9gmqg" podStartSLOduration=3.58416744 podStartE2EDuration="12.65286261s" podCreationTimestamp="2025-11-30 08:18:37 +0000 UTC" firstStartedPulling="2025-11-30 08:18:39.039909004 +0000 UTC m=+5539.808080613" lastFinishedPulling="2025-11-30 08:18:48.108604174 +0000 UTC m=+5548.876775783" observedRunningTime="2025-11-30 08:18:49.650744133 +0000 UTC m=+5550.418915742" watchObservedRunningTime="2025-11-30 08:18:49.65286261 +0000 UTC m=+5550.421034219"
Nov 30 08:18:49 crc kubenswrapper[4941]: I1130 08:18:49.689928 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd" podStartSLOduration=2.788754761 podStartE2EDuration="11.689901395s" podCreationTimestamp="2025-11-30 08:18:38 +0000 UTC" firstStartedPulling="2025-11-30 08:18:39.137127073 +0000 UTC m=+5539.905298682" lastFinishedPulling="2025-11-30 08:18:48.038273717 +0000 UTC m=+5548.806445316" observedRunningTime="2025-11-30 08:18:49.677365588 +0000 UTC m=+5550.445537197" watchObservedRunningTime="2025-11-30 08:18:49.689901395 +0000 UTC m=+5550.458073014"
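The four "Observed pod startup duration" entries above report both podStartE2EDuration and podStartSLOduration. In every case the logged values are consistent with the SLO figure being the end-to-end startup time minus the time spent pulling images; a minimal Go sketch of that arithmetic (assumed interpretation, not kubelet source), using the m=+ monotonic offsets from the gn6hd entry:

// sketch_startup_latency.go — checks the relationship implied by the entries
// above: podStartSLOduration = podStartE2EDuration - image pull time.
package main

import "fmt"

func main() {
	const (
		firstStartedPulling = 5539.905298682 // m=+ offset (s) from the gn6hd entry
		lastFinishedPulling = 5548.806445316 // m=+ offset (s)
		podStartE2EDuration = 11.689901395   // logged end-to-end duration (s)
	)
	imagePull := lastFinishedPulling - firstStartedPulling // 8.901146634s
	slo := podStartE2EDuration - imagePull
	fmt.Printf("podStartSLOduration ~= %.9fs\n", slo) // 2.788754761s, as logged
}

The same subtraction reproduces the other three entries exactly (e.g. 11.606271167 - 8.626242036 = 2.980029131 for observability-operator-d8bb48f5d-9sjwb), which is why pods whose images are already cached show SLO durations close to their E2E durations.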
Nov 30 08:18:50 crc kubenswrapper[4941]: I1130 08:18:50.074958 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-eb6c-account-create-update-6xtp9"]
Nov 30 08:18:50 crc kubenswrapper[4941]: I1130 08:18:50.095172 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-eb6c-account-create-update-6xtp9"]
Nov 30 08:18:50 crc kubenswrapper[4941]: I1130 08:18:50.522177 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f"
Nov 30 08:18:50 crc kubenswrapper[4941]: E1130 08:18:50.522876 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:18:50 crc kubenswrapper[4941]: I1130 08:18:50.569252 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb"
Nov 30 08:18:50 crc kubenswrapper[4941]: I1130 08:18:50.575585 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-9sjwb"
Nov 30 08:18:51 crc kubenswrapper[4941]: I1130 08:18:51.533966 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b591166-3d20-4fb2-94e2-1c6c38555910" path="/var/lib/kubelet/pods/7b591166-3d20-4fb2-94e2-1c6c38555910/volumes"
Nov 30 08:18:58 crc kubenswrapper[4941]: I1130 08:18:58.815843 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-krb82"
Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.451975 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.452946 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" containerName="openstackclient" containerID="cri-o://22dc93670680ebf076a6f69c890f212a609262e12833cf19eaa55540c36212ed" gracePeriod=2
Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.462655 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.519352 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Nov 30 08:19:01 crc kubenswrapper[4941]: E1130 08:19:01.520069 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" containerName="openstackclient"
Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.520092 4941 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" containerName="openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.520316 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" containerName="openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.521434 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.522509 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:19:01 crc kubenswrapper[4941]: E1130 08:19:01.522837 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.541737 4941 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" podUID="84f0bf8c-bf1e-4179-b156-652c9591c146" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.544405 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.612514 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/84f0bf8c-bf1e-4179-b156-652c9591c146-openstack-config\") pod \"openstackclient\" (UID: \"84f0bf8c-bf1e-4179-b156-652c9591c146\") " pod="openstack/openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.612626 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/84f0bf8c-bf1e-4179-b156-652c9591c146-openstack-config-secret\") pod \"openstackclient\" (UID: \"84f0bf8c-bf1e-4179-b156-652c9591c146\") " pod="openstack/openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.612760 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hl6r\" (UniqueName: \"kubernetes.io/projected/84f0bf8c-bf1e-4179-b156-652c9591c146-kube-api-access-5hl6r\") pod \"openstackclient\" (UID: \"84f0bf8c-bf1e-4179-b156-652c9591c146\") " pod="openstack/openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.714322 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.714927 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/84f0bf8c-bf1e-4179-b156-652c9591c146-openstack-config\") pod \"openstackclient\" (UID: \"84f0bf8c-bf1e-4179-b156-652c9591c146\") " pod="openstack/openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.715097 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/84f0bf8c-bf1e-4179-b156-652c9591c146-openstack-config-secret\") 
pod \"openstackclient\" (UID: \"84f0bf8c-bf1e-4179-b156-652c9591c146\") " pod="openstack/openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.715284 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hl6r\" (UniqueName: \"kubernetes.io/projected/84f0bf8c-bf1e-4179-b156-652c9591c146-kube-api-access-5hl6r\") pod \"openstackclient\" (UID: \"84f0bf8c-bf1e-4179-b156-652c9591c146\") " pod="openstack/openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.715842 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/84f0bf8c-bf1e-4179-b156-652c9591c146-openstack-config\") pod \"openstackclient\" (UID: \"84f0bf8c-bf1e-4179-b156-652c9591c146\") " pod="openstack/openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.715999 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.722005 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-4rf9t" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.722454 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/84f0bf8c-bf1e-4179-b156-652c9591c146-openstack-config-secret\") pod \"openstackclient\" (UID: \"84f0bf8c-bf1e-4179-b156-652c9591c146\") " pod="openstack/openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.737286 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.748374 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hl6r\" (UniqueName: \"kubernetes.io/projected/84f0bf8c-bf1e-4179-b156-652c9591c146-kube-api-access-5hl6r\") pod \"openstackclient\" (UID: \"84f0bf8c-bf1e-4179-b156-652c9591c146\") " pod="openstack/openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.828533 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mf27\" (UniqueName: \"kubernetes.io/projected/f664d4a3-d502-4925-aa66-755419694d2b-kube-api-access-6mf27\") pod \"kube-state-metrics-0\" (UID: \"f664d4a3-d502-4925-aa66-755419694d2b\") " pod="openstack/kube-state-metrics-0" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.873528 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.938707 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mf27\" (UniqueName: \"kubernetes.io/projected/f664d4a3-d502-4925-aa66-755419694d2b-kube-api-access-6mf27\") pod \"kube-state-metrics-0\" (UID: \"f664d4a3-d502-4925-aa66-755419694d2b\") " pod="openstack/kube-state-metrics-0" Nov 30 08:19:01 crc kubenswrapper[4941]: I1130 08:19:01.976677 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mf27\" (UniqueName: \"kubernetes.io/projected/f664d4a3-d502-4925-aa66-755419694d2b-kube-api-access-6mf27\") pod \"kube-state-metrics-0\" (UID: \"f664d4a3-d502-4925-aa66-755419694d2b\") " pod="openstack/kube-state-metrics-0" Nov 30 08:19:02 crc kubenswrapper[4941]: I1130 08:19:02.136010 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.168135 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.177501 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.202439 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.207025 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.207215 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.207424 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.207586 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-42hcn" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.207698 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.215355 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.263003 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.311600 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/97438922-72d6-4d56-bfa0-11e88de4d27f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.311722 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-624ffb2f-659c-4f1c-b556-0fffe090e0f1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-624ffb2f-659c-4f1c-b556-0fffe090e0f1\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.311778 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/97438922-72d6-4d56-bfa0-11e88de4d27f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.311809 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/97438922-72d6-4d56-bfa0-11e88de4d27f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " 
pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.311827 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/97438922-72d6-4d56-bfa0-11e88de4d27f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.311857 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/97438922-72d6-4d56-bfa0-11e88de4d27f-config\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.311872 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/97438922-72d6-4d56-bfa0-11e88de4d27f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.311897 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6cvl\" (UniqueName: \"kubernetes.io/projected/97438922-72d6-4d56-bfa0-11e88de4d27f-kube-api-access-k6cvl\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.416867 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/97438922-72d6-4d56-bfa0-11e88de4d27f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.417021 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-624ffb2f-659c-4f1c-b556-0fffe090e0f1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-624ffb2f-659c-4f1c-b556-0fffe090e0f1\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.417089 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/97438922-72d6-4d56-bfa0-11e88de4d27f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.417115 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/97438922-72d6-4d56-bfa0-11e88de4d27f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.417137 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/97438922-72d6-4d56-bfa0-11e88de4d27f-web-config\") 
pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.417167 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/97438922-72d6-4d56-bfa0-11e88de4d27f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.417186 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/97438922-72d6-4d56-bfa0-11e88de4d27f-config\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.417215 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6cvl\" (UniqueName: \"kubernetes.io/projected/97438922-72d6-4d56-bfa0-11e88de4d27f-kube-api-access-k6cvl\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.425936 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/97438922-72d6-4d56-bfa0-11e88de4d27f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.429293 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/97438922-72d6-4d56-bfa0-11e88de4d27f-config\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.432125 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/97438922-72d6-4d56-bfa0-11e88de4d27f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.438190 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/97438922-72d6-4d56-bfa0-11e88de4d27f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.441522 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/97438922-72d6-4d56-bfa0-11e88de4d27f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.444864 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6cvl\" (UniqueName: \"kubernetes.io/projected/97438922-72d6-4d56-bfa0-11e88de4d27f-kube-api-access-k6cvl\") pod \"prometheus-metric-storage-0\" (UID: 
\"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.459765 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/97438922-72d6-4d56-bfa0-11e88de4d27f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.485422 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.495865 4941 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.495913 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-624ffb2f-659c-4f1c-b556-0fffe090e0f1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-624ffb2f-659c-4f1c-b556-0fffe090e0f1\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/656c6ed823a45af8cebda1a65f910a59eed06e5dd745a1ecaf03f33f134e3f97/globalmount\"" pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.647318 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-624ffb2f-659c-4f1c-b556-0fffe090e0f1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-624ffb2f-659c-4f1c-b556-0fffe090e0f1\") pod \"prometheus-metric-storage-0\" (UID: \"97438922-72d6-4d56-bfa0-11e88de4d27f\") " pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.896846 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f664d4a3-d502-4925-aa66-755419694d2b","Type":"ContainerStarted","Data":"89831765c5a2894e16aa79047faa9972180ab891b6b201180cf2227d9a9d8cf8"} Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.900260 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"84f0bf8c-bf1e-4179-b156-652c9591c146","Type":"ContainerStarted","Data":"fcffc5359f2aa7a123ec0e44535264bac55a686a05bb087e85d9981390848b0b"} Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.900316 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"84f0bf8c-bf1e-4179-b156-652c9591c146","Type":"ContainerStarted","Data":"fcb416ec3d849303f2f800ec3069014e7dda2a765326233346298958ab2f5764"} Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.906311 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.910609 4941 generic.go:334] "Generic (PLEG): container finished" podID="7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" containerID="22dc93670680ebf076a6f69c890f212a609262e12833cf19eaa55540c36212ed" exitCode=137 Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.910728 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3168a3259280afe110633475094e477eb546202d51b3efdb6ceb3985f12c196" Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.923920 4941 util.go:48] "No ready sandbox for pod can be found. 
Nov 30 08:19:03 crc kubenswrapper[4941]: I1130 08:19:03.925533 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.925503606 podStartE2EDuration="2.925503606s" podCreationTimestamp="2025-11-30 08:19:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:19:03.924348359 +0000 UTC m=+5564.692519968" watchObservedRunningTime="2025-11-30 08:19:03.925503606 +0000 UTC m=+5564.693675215"
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.045394 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-openstack-config-secret\") pod \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\" (UID: \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\") "
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.045440 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-openstack-config\") pod \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\" (UID: \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\") "
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.045573 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrtd8\" (UniqueName: \"kubernetes.io/projected/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-kube-api-access-lrtd8\") pod \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\" (UID: \"7482c40c-9ba1-4393-90dc-3d5b0cf29cbb\") "
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.055108 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-kube-api-access-lrtd8" (OuterVolumeSpecName: "kube-api-access-lrtd8") pod "7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" (UID: "7482c40c-9ba1-4393-90dc-3d5b0cf29cbb"). InnerVolumeSpecName "kube-api-access-lrtd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.086962 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" (UID: "7482c40c-9ba1-4393-90dc-3d5b0cf29cbb"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.126212 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" (UID: "7482c40c-9ba1-4393-90dc-3d5b0cf29cbb"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.161907 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrtd8\" (UniqueName: \"kubernetes.io/projected/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-kube-api-access-lrtd8\") on node \"crc\" DevicePath \"\""
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.162139 4941 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.162199 4941 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb-openstack-config\") on node \"crc\" DevicePath \"\""
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.257724 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.260531 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.264844 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0"
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.265092 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-9wntx"
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.265115 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config"
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.265228 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config"
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.265385 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated"
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.286924 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.366621 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0"
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.367501 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0"
Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.367625 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7qxw\" (UniqueName: \"kubernetes.io/projected/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-kube-api-access-h7qxw\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0"
pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.367802 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.368235 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.368352 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.368452 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.471474 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.471951 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.472126 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7qxw\" (UniqueName: \"kubernetes.io/projected/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-kube-api-access-h7qxw\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.472998 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.473779 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-config-out\") pod 
\"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.474255 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.474536 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.474964 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.479439 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.486659 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.486939 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.487151 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.487474 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.490486 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7qxw\" (UniqueName: \"kubernetes.io/projected/d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e-kube-api-access-h7qxw\") pod \"alertmanager-metric-storage-0\" (UID: 
\"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e\") " pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.556068 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.602743 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.921786 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f664d4a3-d502-4925-aa66-755419694d2b","Type":"ContainerStarted","Data":"62c495c8b85122b423ae8d38d080e17cdd3b2f9977d2b2e2d422c398960bd442"} Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.922596 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.923799 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"97438922-72d6-4d56-bfa0-11e88de4d27f","Type":"ContainerStarted","Data":"856a5e8d32faa0b2051aaf93ec27a8ab44bc40905adada0982211600e8f2c69e"} Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.923889 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 30 08:19:04 crc kubenswrapper[4941]: I1130 08:19:04.948486 4941 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" podUID="84f0bf8c-bf1e-4179-b156-652c9591c146" Nov 30 08:19:05 crc kubenswrapper[4941]: I1130 08:19:05.059911 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.472988449 podStartE2EDuration="4.059886125s" podCreationTimestamp="2025-11-30 08:19:01 +0000 UTC" firstStartedPulling="2025-11-30 08:19:03.293855555 +0000 UTC m=+5564.062027164" lastFinishedPulling="2025-11-30 08:19:03.880753231 +0000 UTC m=+5564.648924840" observedRunningTime="2025-11-30 08:19:04.939024255 +0000 UTC m=+5565.707195864" watchObservedRunningTime="2025-11-30 08:19:05.059886125 +0000 UTC m=+5565.828057734" Nov 30 08:19:05 crc kubenswrapper[4941]: I1130 08:19:05.068600 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Nov 30 08:19:05 crc kubenswrapper[4941]: W1130 08:19:05.071785 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8c3c419_4754_4ee6_a7d4_3ea9c60fea9e.slice/crio-0f94241fc6096f746e7c756083d150d3837c5f259fc8f86109e383eac7915cc5 WatchSource:0}: Error finding container 0f94241fc6096f746e7c756083d150d3837c5f259fc8f86109e383eac7915cc5: Status 404 returned error can't find the container with id 0f94241fc6096f746e7c756083d150d3837c5f259fc8f86109e383eac7915cc5 Nov 30 08:19:05 crc kubenswrapper[4941]: I1130 08:19:05.534416 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7482c40c-9ba1-4393-90dc-3d5b0cf29cbb" path="/var/lib/kubelet/pods/7482c40c-9ba1-4393-90dc-3d5b0cf29cbb/volumes" Nov 30 08:19:05 crc kubenswrapper[4941]: I1130 08:19:05.951459 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" 
event={"ID":"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e","Type":"ContainerStarted","Data":"0f94241fc6096f746e7c756083d150d3837c5f259fc8f86109e383eac7915cc5"} Nov 30 08:19:12 crc kubenswrapper[4941]: I1130 08:19:12.040429 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e","Type":"ContainerStarted","Data":"5d286ce206cada557509a0477a44862827d40c3b1d1fb5daf3c5ceebed6a2326"} Nov 30 08:19:12 crc kubenswrapper[4941]: I1130 08:19:12.044533 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"97438922-72d6-4d56-bfa0-11e88de4d27f","Type":"ContainerStarted","Data":"3f07e665495ea26d83babf9a8a2cc8b5e4fdb88b1a62951181b60d66366ca113"} Nov 30 08:19:12 crc kubenswrapper[4941]: I1130 08:19:12.140389 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 30 08:19:15 crc kubenswrapper[4941]: I1130 08:19:15.521931 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:19:15 crc kubenswrapper[4941]: E1130 08:19:15.522924 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:19:17 crc kubenswrapper[4941]: I1130 08:19:17.049298 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-cd96b"] Nov 30 08:19:17 crc kubenswrapper[4941]: I1130 08:19:17.059128 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-cd96b"] Nov 30 08:19:17 crc kubenswrapper[4941]: I1130 08:19:17.535290 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35b780b1-dbee-4f02-9566-3e8e407d8ce5" path="/var/lib/kubelet/pods/35b780b1-dbee-4f02-9566-3e8e407d8ce5/volumes" Nov 30 08:19:20 crc kubenswrapper[4941]: I1130 08:19:20.152219 4941 generic.go:334] "Generic (PLEG): container finished" podID="97438922-72d6-4d56-bfa0-11e88de4d27f" containerID="3f07e665495ea26d83babf9a8a2cc8b5e4fdb88b1a62951181b60d66366ca113" exitCode=0 Nov 30 08:19:20 crc kubenswrapper[4941]: I1130 08:19:20.152356 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"97438922-72d6-4d56-bfa0-11e88de4d27f","Type":"ContainerDied","Data":"3f07e665495ea26d83babf9a8a2cc8b5e4fdb88b1a62951181b60d66366ca113"} Nov 30 08:19:21 crc kubenswrapper[4941]: I1130 08:19:21.167564 4941 generic.go:334] "Generic (PLEG): container finished" podID="d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e" containerID="5d286ce206cada557509a0477a44862827d40c3b1d1fb5daf3c5ceebed6a2326" exitCode=0 Nov 30 08:19:21 crc kubenswrapper[4941]: I1130 08:19:21.167660 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e","Type":"ContainerDied","Data":"5d286ce206cada557509a0477a44862827d40c3b1d1fb5daf3c5ceebed6a2326"} Nov 30 08:19:24 crc kubenswrapper[4941]: I1130 08:19:24.204290 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" 
event={"ID":"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e","Type":"ContainerStarted","Data":"db7e7639f378c06cc35d958d444dc904dbf4eee88f0f814cdbb776aee9da55b9"} Nov 30 08:19:27 crc kubenswrapper[4941]: I1130 08:19:27.523291 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f" Nov 30 08:19:27 crc kubenswrapper[4941]: E1130 08:19:27.524879 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:19:29 crc kubenswrapper[4941]: I1130 08:19:29.266225 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e","Type":"ContainerStarted","Data":"0a46d0d60ae3deca8f26d4efb242fc545aa6bb69b560020737beaeb21d804a04"} Nov 30 08:19:29 crc kubenswrapper[4941]: I1130 08:19:29.268887 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:29 crc kubenswrapper[4941]: I1130 08:19:29.272856 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Nov 30 08:19:29 crc kubenswrapper[4941]: I1130 08:19:29.292502 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=6.523233419 podStartE2EDuration="25.292482785s" podCreationTimestamp="2025-11-30 08:19:04 +0000 UTC" firstStartedPulling="2025-11-30 08:19:05.0758755 +0000 UTC m=+5565.844047109" lastFinishedPulling="2025-11-30 08:19:23.845124866 +0000 UTC m=+5584.613296475" observedRunningTime="2025-11-30 08:19:29.287877432 +0000 UTC m=+5590.056049081" watchObservedRunningTime="2025-11-30 08:19:29.292482785 +0000 UTC m=+5590.060654394" Nov 30 08:19:30 crc kubenswrapper[4941]: I1130 08:19:30.280009 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"97438922-72d6-4d56-bfa0-11e88de4d27f","Type":"ContainerStarted","Data":"5ecd761dcf634baa879f025f6b324e5e32d9276eb50be0c65ce9d4cb06ae8116"} Nov 30 08:19:34 crc kubenswrapper[4941]: I1130 08:19:34.332581 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"97438922-72d6-4d56-bfa0-11e88de4d27f","Type":"ContainerStarted","Data":"8aaed9eaa13d266d188a9593eac7ea6813953c6abcd869b3441ca0dcf271063a"} Nov 30 08:19:37 crc kubenswrapper[4941]: I1130 08:19:37.368317 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"97438922-72d6-4d56-bfa0-11e88de4d27f","Type":"ContainerStarted","Data":"3b8cb9f020b80b3ff45bd95cd1133d81fd6dfd9f1903b8e5bed1637db04a1424"} Nov 30 08:19:37 crc kubenswrapper[4941]: I1130 08:19:37.410490 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=3.3518154510000002 podStartE2EDuration="35.41045585s" podCreationTimestamp="2025-11-30 08:19:02 +0000 UTC" firstStartedPulling="2025-11-30 08:19:04.574841823 +0000 UTC m=+5565.343013432" lastFinishedPulling="2025-11-30 08:19:36.633482222 +0000 UTC m=+5597.401653831" 
Nov 30 08:19:38 crc kubenswrapper[4941]: I1130 08:19:38.907659 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0"
Nov 30 08:19:40 crc kubenswrapper[4941]: I1130 08:19:40.522117 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.413461 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"b2b0ac757b4b509277e111501824da6d25da3cf0f9b1a7aee73120ac3b723944"}
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.659964 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.662793 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.666358 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.666570 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.678725 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.828437 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-scripts\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.829156 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7dd7a347-dc57-4862-9ed0-468a49b4de16-run-httpd\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.829591 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.829691 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.830009 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjw2l\" (UniqueName: \"kubernetes.io/projected/7dd7a347-dc57-4862-9ed0-468a49b4de16-kube-api-access-wjw2l\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.830066 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7dd7a347-dc57-4862-9ed0-468a49b4de16-log-httpd\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.830211 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-config-data\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.932246 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.932302 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.932398 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjw2l\" (UniqueName: \"kubernetes.io/projected/7dd7a347-dc57-4862-9ed0-468a49b4de16-kube-api-access-wjw2l\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.932420 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7dd7a347-dc57-4862-9ed0-468a49b4de16-log-httpd\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.932466 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-config-data\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.932497 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-scripts\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.932520 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7dd7a347-dc57-4862-9ed0-468a49b4de16-run-httpd\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.933084 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7dd7a347-dc57-4862-9ed0-468a49b4de16-run-httpd\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.933220 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7dd7a347-dc57-4862-9ed0-468a49b4de16-log-httpd\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.941133 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.943083 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-scripts\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.947568 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-config-data\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.947614 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:41 crc kubenswrapper[4941]: I1130 08:19:41.958011 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjw2l\" (UniqueName: \"kubernetes.io/projected/7dd7a347-dc57-4862-9ed0-468a49b4de16-kube-api-access-wjw2l\") pod \"ceilometer-0\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " pod="openstack/ceilometer-0"
Nov 30 08:19:42 crc kubenswrapper[4941]: I1130 08:19:42.026488 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 30 08:19:42 crc kubenswrapper[4941]: I1130 08:19:42.559454 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 30 08:19:43 crc kubenswrapper[4941]: I1130 08:19:43.441897 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7dd7a347-dc57-4862-9ed0-468a49b4de16","Type":"ContainerStarted","Data":"bd0443ac18a7a6089089b1ffac0d3ab66c5b798471b64a44368775ff9dd01569"}
Nov 30 08:19:48 crc kubenswrapper[4941]: I1130 08:19:48.051794 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-5f5b-account-create-update-k9fmn"]
Nov 30 08:19:48 crc kubenswrapper[4941]: I1130 08:19:48.066273 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-5f5b-account-create-update-k9fmn"]
Nov 30 08:19:48 crc kubenswrapper[4941]: I1130 08:19:48.448854 4941 scope.go:117] "RemoveContainer" containerID="22dc93670680ebf076a6f69c890f212a609262e12833cf19eaa55540c36212ed"
Nov 30 08:19:48 crc kubenswrapper[4941]: I1130 08:19:48.505138 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7dd7a347-dc57-4862-9ed0-468a49b4de16","Type":"ContainerStarted","Data":"a36172a6f0f2d6bceeb0b651abc451e0ff47ce78bc7aae3eaa1a76546b9fcee4"}
Nov 30 08:19:48 crc kubenswrapper[4941]: I1130 08:19:48.663163 4941 scope.go:117] "RemoveContainer" containerID="5deb1759bf043fe37d721b621991513743501477835c38c92e5d0d9bc4232d69"
Nov 30 08:19:48 crc kubenswrapper[4941]: I1130 08:19:48.718140 4941 scope.go:117] "RemoveContainer" containerID="4cc69630185f7c314dbd3633c69cf749dbd760579e646cbd40b6ea95bad4a3cf"
Nov 30 08:19:48 crc kubenswrapper[4941]: I1130 08:19:48.894927 4941 scope.go:117] "RemoveContainer" containerID="7d600181d16f5a62661188ed4388538845785344960331bd961e5b19fabbcfec"
Nov 30 08:19:48 crc kubenswrapper[4941]: I1130 08:19:48.907467 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Nov 30 08:19:48 crc kubenswrapper[4941]: I1130 08:19:48.912198 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0"
Nov 30 08:19:49 crc kubenswrapper[4941]: I1130 08:19:49.036372 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-gpqrl"]
Nov 30 08:19:49 crc kubenswrapper[4941]: I1130 08:19:49.047073 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-gpqrl"]
Nov 30 08:19:49 crc kubenswrapper[4941]: I1130 08:19:49.542997 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00c3f957-7477-4a82-9739-c4beeb006bbd" path="/var/lib/kubelet/pods/00c3f957-7477-4a82-9739-c4beeb006bbd/volumes"
Nov 30 08:19:49 crc kubenswrapper[4941]: I1130 08:19:49.544423 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c22aeb0-d80f-41b2-b261-6d8105dc83b8" path="/var/lib/kubelet/pods/2c22aeb0-d80f-41b2-b261-6d8105dc83b8/volumes"
Nov 30 08:19:49 crc kubenswrapper[4941]: I1130 08:19:49.545375 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Nov 30 08:19:49 crc kubenswrapper[4941]: I1130 08:19:49.545416 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7dd7a347-dc57-4862-9ed0-468a49b4de16","Type":"ContainerStarted","Data":"7f6cf6182447456efe170be4ed5e1e6733245a2a57cd1ea7c595890b235f204e"}
Nov 30 08:19:50 crc kubenswrapper[4941]: I1130 08:19:50.554856 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7dd7a347-dc57-4862-9ed0-468a49b4de16","Type":"ContainerStarted","Data":"b3056f14b8e9cd3df34c78c461cf1e0d0eda35250717365f814129aad6e0bce1"}
Nov 30 08:19:52 crc kubenswrapper[4941]: I1130 08:19:52.589275 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7dd7a347-dc57-4862-9ed0-468a49b4de16","Type":"ContainerStarted","Data":"873218d3f7dc48736dffb325ee97fa6c6384c1333a030509e1eea4b01d8b80d8"}
Nov 30 08:19:52 crc kubenswrapper[4941]: I1130 08:19:52.590092 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Nov 30 08:19:52 crc kubenswrapper[4941]: I1130 08:19:52.656849 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.873643919 podStartE2EDuration="11.65679721s" podCreationTimestamp="2025-11-30 08:19:41 +0000 UTC" firstStartedPulling="2025-11-30 08:19:42.557161233 +0000 UTC m=+5603.325332842" lastFinishedPulling="2025-11-30 08:19:51.340314524 +0000 UTC m=+5612.108486133" observedRunningTime="2025-11-30 08:19:52.617408991 +0000 UTC m=+5613.385580630" watchObservedRunningTime="2025-11-30 08:19:52.65679721 +0000 UTC m=+5613.424968829"
Nov 30 08:19:58 crc kubenswrapper[4941]: I1130 08:19:58.071108 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-ct89q"]
Nov 30 08:19:58 crc kubenswrapper[4941]: I1130 08:19:58.084227 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-ct89q"]
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.537195 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="926092ac-cdc5-4ad0-b9ec-bbd380355254" path="/var/lib/kubelet/pods/926092ac-cdc5-4ad0-b9ec-bbd380355254/volumes"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.624337 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-jzlcr"]
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.625655 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-jzlcr"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.650646 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-jzlcr"]
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.674577 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c3a7561-0dd1-4f36-bd83-19ed4c763c29-operator-scripts\") pod \"aodh-db-create-jzlcr\" (UID: \"5c3a7561-0dd1-4f36-bd83-19ed4c763c29\") " pod="openstack/aodh-db-create-jzlcr"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.674676 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzcnc\" (UniqueName: \"kubernetes.io/projected/5c3a7561-0dd1-4f36-bd83-19ed4c763c29-kube-api-access-rzcnc\") pod \"aodh-db-create-jzlcr\" (UID: \"5c3a7561-0dd1-4f36-bd83-19ed4c763c29\") " pod="openstack/aodh-db-create-jzlcr"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.729734 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-8a71-account-create-update-9kc9v"]
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.731563 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-8a71-account-create-update-9kc9v"
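
The probe entries above encode a small state machine: prometheus-metric-storage-0's startup probe flips from unhealthy to started at 08:19:48, and only then does its readiness probe report ready (08:19:49), since readiness is not evaluated until startup succeeds. A sketch that tracks the most recent status per pod and probe type from stdin:

# Sketch: track the latest "SyncLoop (probe)" status per pod and probe type
# from entries like the prometheus startup/readiness flips above.
import re
import sys

PROBE = re.compile(r'"SyncLoop \(probe\)" probe="(\w+)" status="(\w*)" pod="([^"]+)"')
latest = {}
for line in sys.stdin:
    if (m := PROBE.search(line)):
        probe, status, pod = m.groups()
        latest[(pod, probe)] = status or "(empty)"
for (pod, probe), status in sorted(latest.items()):
    print(f"{pod} [{probe}] -> {status}")
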
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.737728 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.745477 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-8a71-account-create-update-9kc9v"]
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.776619 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d-operator-scripts\") pod \"aodh-8a71-account-create-update-9kc9v\" (UID: \"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d\") " pod="openstack/aodh-8a71-account-create-update-9kc9v"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.776709 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnhj7\" (UniqueName: \"kubernetes.io/projected/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d-kube-api-access-fnhj7\") pod \"aodh-8a71-account-create-update-9kc9v\" (UID: \"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d\") " pod="openstack/aodh-8a71-account-create-update-9kc9v"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.776813 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c3a7561-0dd1-4f36-bd83-19ed4c763c29-operator-scripts\") pod \"aodh-db-create-jzlcr\" (UID: \"5c3a7561-0dd1-4f36-bd83-19ed4c763c29\") " pod="openstack/aodh-db-create-jzlcr"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.776882 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzcnc\" (UniqueName: \"kubernetes.io/projected/5c3a7561-0dd1-4f36-bd83-19ed4c763c29-kube-api-access-rzcnc\") pod \"aodh-db-create-jzlcr\" (UID: \"5c3a7561-0dd1-4f36-bd83-19ed4c763c29\") " pod="openstack/aodh-db-create-jzlcr"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.777795 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c3a7561-0dd1-4f36-bd83-19ed4c763c29-operator-scripts\") pod \"aodh-db-create-jzlcr\" (UID: \"5c3a7561-0dd1-4f36-bd83-19ed4c763c29\") " pod="openstack/aodh-db-create-jzlcr"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.817985 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzcnc\" (UniqueName: \"kubernetes.io/projected/5c3a7561-0dd1-4f36-bd83-19ed4c763c29-kube-api-access-rzcnc\") pod \"aodh-db-create-jzlcr\" (UID: \"5c3a7561-0dd1-4f36-bd83-19ed4c763c29\") " pod="openstack/aodh-db-create-jzlcr"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.880078 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d-operator-scripts\") pod \"aodh-8a71-account-create-update-9kc9v\" (UID: \"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d\") " pod="openstack/aodh-8a71-account-create-update-9kc9v"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.880589 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnhj7\" (UniqueName: \"kubernetes.io/projected/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d-kube-api-access-fnhj7\") pod \"aodh-8a71-account-create-update-9kc9v\" (UID: \"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d\") " pod="openstack/aodh-8a71-account-create-update-9kc9v"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.880887 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d-operator-scripts\") pod \"aodh-8a71-account-create-update-9kc9v\" (UID: \"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d\") " pod="openstack/aodh-8a71-account-create-update-9kc9v"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.910233 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnhj7\" (UniqueName: \"kubernetes.io/projected/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d-kube-api-access-fnhj7\") pod \"aodh-8a71-account-create-update-9kc9v\" (UID: \"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d\") " pod="openstack/aodh-8a71-account-create-update-9kc9v"
Nov 30 08:19:59 crc kubenswrapper[4941]: I1130 08:19:59.948820 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-jzlcr"
Nov 30 08:20:00 crc kubenswrapper[4941]: I1130 08:20:00.065133 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-8a71-account-create-update-9kc9v"
Nov 30 08:20:00 crc kubenswrapper[4941]: I1130 08:20:00.522687 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-jzlcr"]
Nov 30 08:20:00 crc kubenswrapper[4941]: W1130 08:20:00.526935 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c3a7561_0dd1_4f36_bd83_19ed4c763c29.slice/crio-97b50dffe3419b68ffb576fd8771e84fb74522c3eb03b76758a7a45a32ea4256 WatchSource:0}: Error finding container 97b50dffe3419b68ffb576fd8771e84fb74522c3eb03b76758a7a45a32ea4256: Status 404 returned error can't find the container with id 97b50dffe3419b68ffb576fd8771e84fb74522c3eb03b76758a7a45a32ea4256
Nov 30 08:20:00 crc kubenswrapper[4941]: I1130 08:20:00.717107 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-8a71-account-create-update-9kc9v"]
Nov 30 08:20:00 crc kubenswrapper[4941]: I1130 08:20:00.721180 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-jzlcr" event={"ID":"5c3a7561-0dd1-4f36-bd83-19ed4c763c29","Type":"ContainerStarted","Data":"97b50dffe3419b68ffb576fd8771e84fb74522c3eb03b76758a7a45a32ea4256"}
Nov 30 08:20:01 crc kubenswrapper[4941]: I1130 08:20:01.735469 4941 generic.go:334] "Generic (PLEG): container finished" podID="06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d" containerID="69f99024a88e4abaa11c982d5a717e4d5d04a4de350d15593d18ffa25cdcd2ea" exitCode=0
Nov 30 08:20:01 crc kubenswrapper[4941]: I1130 08:20:01.735588 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-8a71-account-create-update-9kc9v" event={"ID":"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d","Type":"ContainerDied","Data":"69f99024a88e4abaa11c982d5a717e4d5d04a4de350d15593d18ffa25cdcd2ea"}
Nov 30 08:20:01 crc kubenswrapper[4941]: I1130 08:20:01.735970 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-8a71-account-create-update-9kc9v" event={"ID":"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d","Type":"ContainerStarted","Data":"7ebc0497a1e5b1fdf1aec59065c093f1ba235910ada8db22ced84aa4dedf6057"}
Nov 30 08:20:01 crc kubenswrapper[4941]: I1130 08:20:01.740684 4941 generic.go:334] "Generic (PLEG): container finished" podID="5c3a7561-0dd1-4f36-bd83-19ed4c763c29" containerID="bfa19e8ea86798874c8622baeda35e85aa66b48ffe384e407e17994844690749" exitCode=0
containerID="bfa19e8ea86798874c8622baeda35e85aa66b48ffe384e407e17994844690749" exitCode=0 Nov 30 08:20:01 crc kubenswrapper[4941]: I1130 08:20:01.740765 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-jzlcr" event={"ID":"5c3a7561-0dd1-4f36-bd83-19ed4c763c29","Type":"ContainerDied","Data":"bfa19e8ea86798874c8622baeda35e85aa66b48ffe384e407e17994844690749"} Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.244706 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-8a71-account-create-update-9kc9v" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.250932 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-jzlcr" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.381544 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c3a7561-0dd1-4f36-bd83-19ed4c763c29-operator-scripts\") pod \"5c3a7561-0dd1-4f36-bd83-19ed4c763c29\" (UID: \"5c3a7561-0dd1-4f36-bd83-19ed4c763c29\") " Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.381693 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d-operator-scripts\") pod \"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d\" (UID: \"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d\") " Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.381769 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzcnc\" (UniqueName: \"kubernetes.io/projected/5c3a7561-0dd1-4f36-bd83-19ed4c763c29-kube-api-access-rzcnc\") pod \"5c3a7561-0dd1-4f36-bd83-19ed4c763c29\" (UID: \"5c3a7561-0dd1-4f36-bd83-19ed4c763c29\") " Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.381931 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnhj7\" (UniqueName: \"kubernetes.io/projected/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d-kube-api-access-fnhj7\") pod \"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d\" (UID: \"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d\") " Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.382069 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c3a7561-0dd1-4f36-bd83-19ed4c763c29-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5c3a7561-0dd1-4f36-bd83-19ed4c763c29" (UID: "5c3a7561-0dd1-4f36-bd83-19ed4c763c29"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.382265 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d" (UID: "06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.383020 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c3a7561-0dd1-4f36-bd83-19ed4c763c29-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.383041 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.389574 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c3a7561-0dd1-4f36-bd83-19ed4c763c29-kube-api-access-rzcnc" (OuterVolumeSpecName: "kube-api-access-rzcnc") pod "5c3a7561-0dd1-4f36-bd83-19ed4c763c29" (UID: "5c3a7561-0dd1-4f36-bd83-19ed4c763c29"). InnerVolumeSpecName "kube-api-access-rzcnc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.392522 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d-kube-api-access-fnhj7" (OuterVolumeSpecName: "kube-api-access-fnhj7") pod "06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d" (UID: "06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d"). InnerVolumeSpecName "kube-api-access-fnhj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.485196 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzcnc\" (UniqueName: \"kubernetes.io/projected/5c3a7561-0dd1-4f36-bd83-19ed4c763c29-kube-api-access-rzcnc\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.485240 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnhj7\" (UniqueName: \"kubernetes.io/projected/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d-kube-api-access-fnhj7\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.766394 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-8a71-account-create-update-9kc9v" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.766413 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-8a71-account-create-update-9kc9v" event={"ID":"06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d","Type":"ContainerDied","Data":"7ebc0497a1e5b1fdf1aec59065c093f1ba235910ada8db22ced84aa4dedf6057"} Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.767218 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ebc0497a1e5b1fdf1aec59065c093f1ba235910ada8db22ced84aa4dedf6057" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.769381 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-jzlcr" event={"ID":"5c3a7561-0dd1-4f36-bd83-19ed4c763c29","Type":"ContainerDied","Data":"97b50dffe3419b68ffb576fd8771e84fb74522c3eb03b76758a7a45a32ea4256"} Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.769431 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97b50dffe3419b68ffb576fd8771e84fb74522c3eb03b76758a7a45a32ea4256" Nov 30 08:20:03 crc kubenswrapper[4941]: I1130 08:20:03.769582 4941 util.go:48] "No ready sandbox for pod can be found. 
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.131390 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-s95pb"]
Nov 30 08:20:05 crc kubenswrapper[4941]: E1130 08:20:05.133348 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d" containerName="mariadb-account-create-update"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.133442 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d" containerName="mariadb-account-create-update"
Nov 30 08:20:05 crc kubenswrapper[4941]: E1130 08:20:05.133529 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c3a7561-0dd1-4f36-bd83-19ed4c763c29" containerName="mariadb-database-create"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.133588 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c3a7561-0dd1-4f36-bd83-19ed4c763c29" containerName="mariadb-database-create"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.133831 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d" containerName="mariadb-account-create-update"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.133896 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c3a7561-0dd1-4f36-bd83-19ed4c763c29" containerName="mariadb-database-create"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.134765 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.137447 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.137884 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-bfnnb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.138044 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.138206 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.153425 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-s95pb"]
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.229978 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-config-data\") pod \"aodh-db-sync-s95pb\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") " pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.230079 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tfk4\" (UniqueName: \"kubernetes.io/projected/398af6dc-0391-4164-9eab-d83d0451986b-kube-api-access-9tfk4\") pod \"aodh-db-sync-s95pb\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") " pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.230125 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-scripts\") pod \"aodh-db-sync-s95pb\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") " pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.230162 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-combined-ca-bundle\") pod \"aodh-db-sync-s95pb\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") " pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.332534 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-scripts\") pod \"aodh-db-sync-s95pb\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") " pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.332889 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-combined-ca-bundle\") pod \"aodh-db-sync-s95pb\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") " pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.333075 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-config-data\") pod \"aodh-db-sync-s95pb\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") " pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.333218 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tfk4\" (UniqueName: \"kubernetes.io/projected/398af6dc-0391-4164-9eab-d83d0451986b-kube-api-access-9tfk4\") pod \"aodh-db-sync-s95pb\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") " pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.339632 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-scripts\") pod \"aodh-db-sync-s95pb\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") " pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.339965 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-combined-ca-bundle\") pod \"aodh-db-sync-s95pb\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") " pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.348156 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-config-data\") pod \"aodh-db-sync-s95pb\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") " pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.359061 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tfk4\" (UniqueName: \"kubernetes.io/projected/398af6dc-0391-4164-9eab-d83d0451986b-kube-api-access-9tfk4\") pod \"aodh-db-sync-s95pb\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") " pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:05 crc kubenswrapper[4941]: I1130 08:20:05.515601 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:06 crc kubenswrapper[4941]: I1130 08:20:06.051751 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-s95pb"]
Nov 30 08:20:06 crc kubenswrapper[4941]: I1130 08:20:06.063086 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 30 08:20:06 crc kubenswrapper[4941]: I1130 08:20:06.824827 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-s95pb" event={"ID":"398af6dc-0391-4164-9eab-d83d0451986b","Type":"ContainerStarted","Data":"a3392450a0459db4e52e82c34208b06b5222ba0a80d456ab1775c6e0756c80cf"}
Nov 30 08:20:11 crc kubenswrapper[4941]: I1130 08:20:11.880824 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-s95pb" event={"ID":"398af6dc-0391-4164-9eab-d83d0451986b","Type":"ContainerStarted","Data":"3663f4c279b80c742c41b0cfb68ccad6d01b263ef6bef7e4f2a0ce5724e252f2"}
Nov 30 08:20:11 crc kubenswrapper[4941]: I1130 08:20:11.924987 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-s95pb" podStartSLOduration=2.1759697239999998 podStartE2EDuration="6.924955628s" podCreationTimestamp="2025-11-30 08:20:05 +0000 UTC" firstStartedPulling="2025-11-30 08:20:06.062818502 +0000 UTC m=+5626.830990111" lastFinishedPulling="2025-11-30 08:20:10.811804366 +0000 UTC m=+5631.579976015" observedRunningTime="2025-11-30 08:20:11.906463006 +0000 UTC m=+5632.674634636" watchObservedRunningTime="2025-11-30 08:20:11.924955628 +0000 UTC m=+5632.693127267"
Nov 30 08:20:12 crc kubenswrapper[4941]: I1130 08:20:12.035993 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Nov 30 08:20:13 crc kubenswrapper[4941]: I1130 08:20:13.910384 4941 generic.go:334] "Generic (PLEG): container finished" podID="398af6dc-0391-4164-9eab-d83d0451986b" containerID="3663f4c279b80c742c41b0cfb68ccad6d01b263ef6bef7e4f2a0ce5724e252f2" exitCode=0
Nov 30 08:20:13 crc kubenswrapper[4941]: I1130 08:20:13.910485 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-s95pb" event={"ID":"398af6dc-0391-4164-9eab-d83d0451986b","Type":"ContainerDied","Data":"3663f4c279b80c742c41b0cfb68ccad6d01b263ef6bef7e4f2a0ce5724e252f2"}
Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.449586 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-s95pb"
Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.632957 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-combined-ca-bundle\") pod \"398af6dc-0391-4164-9eab-d83d0451986b\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") "
Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.633160 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tfk4\" (UniqueName: \"kubernetes.io/projected/398af6dc-0391-4164-9eab-d83d0451986b-kube-api-access-9tfk4\") pod \"398af6dc-0391-4164-9eab-d83d0451986b\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") "
Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.633197 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-config-data\") pod \"398af6dc-0391-4164-9eab-d83d0451986b\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") "
Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.633253 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-scripts\") pod \"398af6dc-0391-4164-9eab-d83d0451986b\" (UID: \"398af6dc-0391-4164-9eab-d83d0451986b\") "
Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.639798 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-scripts" (OuterVolumeSpecName: "scripts") pod "398af6dc-0391-4164-9eab-d83d0451986b" (UID: "398af6dc-0391-4164-9eab-d83d0451986b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.641009 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/398af6dc-0391-4164-9eab-d83d0451986b-kube-api-access-9tfk4" (OuterVolumeSpecName: "kube-api-access-9tfk4") pod "398af6dc-0391-4164-9eab-d83d0451986b" (UID: "398af6dc-0391-4164-9eab-d83d0451986b"). InnerVolumeSpecName "kube-api-access-9tfk4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.664830 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "398af6dc-0391-4164-9eab-d83d0451986b" (UID: "398af6dc-0391-4164-9eab-d83d0451986b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.665210 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-config-data" (OuterVolumeSpecName: "config-data") pod "398af6dc-0391-4164-9eab-d83d0451986b" (UID: "398af6dc-0391-4164-9eab-d83d0451986b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
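
[Annotation] The "Observed pod startup duration" line for aodh-db-sync-s95pb above is self-consistent and can be rederived from its own fields (a hedged reading of pod_startup_latency_tracker.go: the SLO duration appears to be end-to-end startup time minus time spent pulling images, with the pull interval taken from the monotonic m=+... readings):

    pull time = 5631.579976015 - 5626.830990111 = 4.748985904 s   (lastFinishedPulling - firstStartedPulling, monotonic)
    e2e       = 08:20:11.924955628 - 08:20:05   = 6.924955628 s   (watchObservedRunningTime - podCreationTimestamp)
    slo       = 6.924955628 - 4.748985904       = 2.175969724 s   (matches podStartSLOduration=2.1759697239999998)

The roughly 4.7 s spent pulling the image is also why the db-sync container only starts at 08:20:11 even though its sandbox was up at 08:20:06.
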
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.735975 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.736018 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tfk4\" (UniqueName: \"kubernetes.io/projected/398af6dc-0391-4164-9eab-d83d0451986b-kube-api-access-9tfk4\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.736032 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.736043 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/398af6dc-0391-4164-9eab-d83d0451986b-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.946030 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-s95pb" event={"ID":"398af6dc-0391-4164-9eab-d83d0451986b","Type":"ContainerDied","Data":"a3392450a0459db4e52e82c34208b06b5222ba0a80d456ab1775c6e0756c80cf"} Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.946112 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3392450a0459db4e52e82c34208b06b5222ba0a80d456ab1775c6e0756c80cf" Nov 30 08:20:15 crc kubenswrapper[4941]: I1130 08:20:15.946665 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-s95pb" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.706180 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Nov 30 08:20:19 crc kubenswrapper[4941]: E1130 08:20:19.707570 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="398af6dc-0391-4164-9eab-d83d0451986b" containerName="aodh-db-sync" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.707591 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="398af6dc-0391-4164-9eab-d83d0451986b" containerName="aodh-db-sync" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.707956 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="398af6dc-0391-4164-9eab-d83d0451986b" containerName="aodh-db-sync" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.710959 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.713788 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.714708 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.723663 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-bfnnb" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.729895 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.862187 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0b4d9d1-d4d1-4136-b815-b582fbda8e7e-combined-ca-bundle\") pod \"aodh-0\" (UID: \"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e\") " pod="openstack/aodh-0" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.862283 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0b4d9d1-d4d1-4136-b815-b582fbda8e7e-scripts\") pod \"aodh-0\" (UID: \"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e\") " pod="openstack/aodh-0" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.862407 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0b4d9d1-d4d1-4136-b815-b582fbda8e7e-config-data\") pod \"aodh-0\" (UID: \"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e\") " pod="openstack/aodh-0" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.862689 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j52jz\" (UniqueName: \"kubernetes.io/projected/e0b4d9d1-d4d1-4136-b815-b582fbda8e7e-kube-api-access-j52jz\") pod \"aodh-0\" (UID: \"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e\") " pod="openstack/aodh-0" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.964480 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0b4d9d1-d4d1-4136-b815-b582fbda8e7e-combined-ca-bundle\") pod \"aodh-0\" (UID: \"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e\") " pod="openstack/aodh-0" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.964555 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0b4d9d1-d4d1-4136-b815-b582fbda8e7e-scripts\") pod \"aodh-0\" (UID: \"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e\") " pod="openstack/aodh-0" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.964582 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0b4d9d1-d4d1-4136-b815-b582fbda8e7e-config-data\") pod \"aodh-0\" (UID: \"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e\") " pod="openstack/aodh-0" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.964640 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j52jz\" (UniqueName: \"kubernetes.io/projected/e0b4d9d1-d4d1-4136-b815-b582fbda8e7e-kube-api-access-j52jz\") pod \"aodh-0\" (UID: \"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e\") " pod="openstack/aodh-0" Nov 30 08:20:19 crc kubenswrapper[4941]: 
I1130 08:20:19.971199 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0b4d9d1-d4d1-4136-b815-b582fbda8e7e-scripts\") pod \"aodh-0\" (UID: \"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e\") " pod="openstack/aodh-0" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.972064 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0b4d9d1-d4d1-4136-b815-b582fbda8e7e-combined-ca-bundle\") pod \"aodh-0\" (UID: \"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e\") " pod="openstack/aodh-0" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.980654 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0b4d9d1-d4d1-4136-b815-b582fbda8e7e-config-data\") pod \"aodh-0\" (UID: \"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e\") " pod="openstack/aodh-0" Nov 30 08:20:19 crc kubenswrapper[4941]: I1130 08:20:19.983847 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j52jz\" (UniqueName: \"kubernetes.io/projected/e0b4d9d1-d4d1-4136-b815-b582fbda8e7e-kube-api-access-j52jz\") pod \"aodh-0\" (UID: \"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e\") " pod="openstack/aodh-0" Nov 30 08:20:20 crc kubenswrapper[4941]: I1130 08:20:20.049840 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Nov 30 08:20:20 crc kubenswrapper[4941]: I1130 08:20:20.635521 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Nov 30 08:20:21 crc kubenswrapper[4941]: I1130 08:20:21.026867 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e","Type":"ContainerStarted","Data":"815499b453ce2e83fa133a5871cc3594ba732b57b05fa910c83b6e7b91a1bb5c"} Nov 30 08:20:21 crc kubenswrapper[4941]: I1130 08:20:21.965349 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:20:21 crc kubenswrapper[4941]: I1130 08:20:21.966008 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="ceilometer-central-agent" containerID="cri-o://a36172a6f0f2d6bceeb0b651abc451e0ff47ce78bc7aae3eaa1a76546b9fcee4" gracePeriod=30 Nov 30 08:20:21 crc kubenswrapper[4941]: I1130 08:20:21.966519 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="proxy-httpd" containerID="cri-o://873218d3f7dc48736dffb325ee97fa6c6384c1333a030509e1eea4b01d8b80d8" gracePeriod=30 Nov 30 08:20:21 crc kubenswrapper[4941]: I1130 08:20:21.966569 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="sg-core" containerID="cri-o://b3056f14b8e9cd3df34c78c461cf1e0d0eda35250717365f814129aad6e0bce1" gracePeriod=30 Nov 30 08:20:21 crc kubenswrapper[4941]: I1130 08:20:21.966641 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="ceilometer-notification-agent" containerID="cri-o://7f6cf6182447456efe170be4ed5e1e6733245a2a57cd1ea7c595890b235f204e" gracePeriod=30 Nov 30 08:20:22 crc kubenswrapper[4941]: I1130 08:20:22.041781 4941 kubelet.go:2453] "SyncLoop (PLEG): event 
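
[Annotation] The four "Killing container with a grace period" lines above tear down the old ceilometer-0 with gracePeriod=30: each container is asked to exit and is forcibly killed only if it outlives the grace window. A minimal, purely illustrative Go sketch of that semantics (an assumed simplification; the real kubelet drives this through the CRI rather than signalling host processes directly):

    package main

    import (
        "os"
        "os/exec"
        "syscall"
        "time"
    )

    // stopWithGrace asks a process to exit (SIGTERM) and escalates to
    // SIGKILL only after the grace period expires.
    func stopWithGrace(proc *os.Process, grace time.Duration) error {
        _ = proc.Signal(syscall.SIGTERM)
        done := make(chan struct{})
        go func() { _, _ = proc.Wait(); close(done) }()
        select {
        case <-done:
            return nil // exited on its own within the window
        case <-time.After(grace):
            return proc.Kill() // hard stop after the 30 s window
        }
    }

    func main() {
        cmd := exec.Command("sleep", "300")
        _ = cmd.Start()
        _ = stopWithGrace(cmd.Process, 30*time.Second)
    }

In the log that follows, all four containers exit within a couple of seconds (sg-core with exitCode=2, the others with exitCode=0), so the 30-second deadline is never reached.
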
for pod" pod="openstack/aodh-0" event={"ID":"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e","Type":"ContainerStarted","Data":"a00f3abf4972cb435b7e3d7c4bf7400603cc0c240824351b8265bfa4b8799d26"} Nov 30 08:20:23 crc kubenswrapper[4941]: I1130 08:20:23.070000 4941 generic.go:334] "Generic (PLEG): container finished" podID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerID="873218d3f7dc48736dffb325ee97fa6c6384c1333a030509e1eea4b01d8b80d8" exitCode=0 Nov 30 08:20:23 crc kubenswrapper[4941]: I1130 08:20:23.070378 4941 generic.go:334] "Generic (PLEG): container finished" podID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerID="b3056f14b8e9cd3df34c78c461cf1e0d0eda35250717365f814129aad6e0bce1" exitCode=2 Nov 30 08:20:23 crc kubenswrapper[4941]: I1130 08:20:23.070398 4941 generic.go:334] "Generic (PLEG): container finished" podID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerID="a36172a6f0f2d6bceeb0b651abc451e0ff47ce78bc7aae3eaa1a76546b9fcee4" exitCode=0 Nov 30 08:20:23 crc kubenswrapper[4941]: I1130 08:20:23.070135 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7dd7a347-dc57-4862-9ed0-468a49b4de16","Type":"ContainerDied","Data":"873218d3f7dc48736dffb325ee97fa6c6384c1333a030509e1eea4b01d8b80d8"} Nov 30 08:20:23 crc kubenswrapper[4941]: I1130 08:20:23.070514 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7dd7a347-dc57-4862-9ed0-468a49b4de16","Type":"ContainerDied","Data":"b3056f14b8e9cd3df34c78c461cf1e0d0eda35250717365f814129aad6e0bce1"} Nov 30 08:20:23 crc kubenswrapper[4941]: I1130 08:20:23.070541 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7dd7a347-dc57-4862-9ed0-468a49b4de16","Type":"ContainerDied","Data":"a36172a6f0f2d6bceeb0b651abc451e0ff47ce78bc7aae3eaa1a76546b9fcee4"} Nov 30 08:20:23 crc kubenswrapper[4941]: I1130 08:20:23.074346 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e","Type":"ContainerStarted","Data":"380ab03c17ec160a8bd163e984d10293fea4d2b407196405d5b623a709814d23"} Nov 30 08:20:24 crc kubenswrapper[4941]: I1130 08:20:24.089463 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e","Type":"ContainerStarted","Data":"35704a243ce859260bcedacb7c3004e5862ff8ac8bd7b4b21114d77543a95761"} Nov 30 08:20:25 crc kubenswrapper[4941]: I1130 08:20:25.119110 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"e0b4d9d1-d4d1-4136-b815-b582fbda8e7e","Type":"ContainerStarted","Data":"bb07c14270e0cd42a5b3000fd68b9e4cf450e4a627a6a4ea1ab026762690bc53"} Nov 30 08:20:25 crc kubenswrapper[4941]: I1130 08:20:25.162211 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.106039117 podStartE2EDuration="6.162167947s" podCreationTimestamp="2025-11-30 08:20:19 +0000 UTC" firstStartedPulling="2025-11-30 08:20:20.662130148 +0000 UTC m=+5641.430301757" lastFinishedPulling="2025-11-30 08:20:24.718258978 +0000 UTC m=+5645.486430587" observedRunningTime="2025-11-30 08:20:25.145168761 +0000 UTC m=+5645.913340410" watchObservedRunningTime="2025-11-30 08:20:25.162167947 +0000 UTC m=+5645.930339606" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.158563 4941 generic.go:334] "Generic (PLEG): container finished" podID="7dd7a347-dc57-4862-9ed0-468a49b4de16" 
containerID="7f6cf6182447456efe170be4ed5e1e6733245a2a57cd1ea7c595890b235f204e" exitCode=0 Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.159576 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7dd7a347-dc57-4862-9ed0-468a49b4de16","Type":"ContainerDied","Data":"7f6cf6182447456efe170be4ed5e1e6733245a2a57cd1ea7c595890b235f204e"} Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.345376 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.448919 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-scripts\") pod \"7dd7a347-dc57-4862-9ed0-468a49b4de16\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.449020 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-combined-ca-bundle\") pod \"7dd7a347-dc57-4862-9ed0-468a49b4de16\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.450222 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjw2l\" (UniqueName: \"kubernetes.io/projected/7dd7a347-dc57-4862-9ed0-468a49b4de16-kube-api-access-wjw2l\") pod \"7dd7a347-dc57-4862-9ed0-468a49b4de16\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.450524 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7dd7a347-dc57-4862-9ed0-468a49b4de16-run-httpd\") pod \"7dd7a347-dc57-4862-9ed0-468a49b4de16\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.450680 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-config-data\") pod \"7dd7a347-dc57-4862-9ed0-468a49b4de16\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.450784 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7dd7a347-dc57-4862-9ed0-468a49b4de16-log-httpd\") pod \"7dd7a347-dc57-4862-9ed0-468a49b4de16\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.450852 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-sg-core-conf-yaml\") pod \"7dd7a347-dc57-4862-9ed0-468a49b4de16\" (UID: \"7dd7a347-dc57-4862-9ed0-468a49b4de16\") " Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.451867 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dd7a347-dc57-4862-9ed0-468a49b4de16-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7dd7a347-dc57-4862-9ed0-468a49b4de16" (UID: "7dd7a347-dc57-4862-9ed0-468a49b4de16"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.452209 4941 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7dd7a347-dc57-4862-9ed0-468a49b4de16-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.459019 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dd7a347-dc57-4862-9ed0-468a49b4de16-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7dd7a347-dc57-4862-9ed0-468a49b4de16" (UID: "7dd7a347-dc57-4862-9ed0-468a49b4de16"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.462335 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dd7a347-dc57-4862-9ed0-468a49b4de16-kube-api-access-wjw2l" (OuterVolumeSpecName: "kube-api-access-wjw2l") pod "7dd7a347-dc57-4862-9ed0-468a49b4de16" (UID: "7dd7a347-dc57-4862-9ed0-468a49b4de16"). InnerVolumeSpecName "kube-api-access-wjw2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.462591 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-scripts" (OuterVolumeSpecName: "scripts") pod "7dd7a347-dc57-4862-9ed0-468a49b4de16" (UID: "7dd7a347-dc57-4862-9ed0-468a49b4de16"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.494537 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7dd7a347-dc57-4862-9ed0-468a49b4de16" (UID: "7dd7a347-dc57-4862-9ed0-468a49b4de16"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.557157 4941 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7dd7a347-dc57-4862-9ed0-468a49b4de16-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.557209 4941 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.557223 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.557236 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjw2l\" (UniqueName: \"kubernetes.io/projected/7dd7a347-dc57-4862-9ed0-468a49b4de16-kube-api-access-wjw2l\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.588453 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7dd7a347-dc57-4862-9ed0-468a49b4de16" (UID: "7dd7a347-dc57-4862-9ed0-468a49b4de16"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.616780 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-config-data" (OuterVolumeSpecName: "config-data") pod "7dd7a347-dc57-4862-9ed0-468a49b4de16" (UID: "7dd7a347-dc57-4862-9ed0-468a49b4de16"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.661516 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:26 crc kubenswrapper[4941]: I1130 08:20:26.661550 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dd7a347-dc57-4862-9ed0-468a49b4de16-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.180858 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7dd7a347-dc57-4862-9ed0-468a49b4de16","Type":"ContainerDied","Data":"bd0443ac18a7a6089089b1ffac0d3ab66c5b798471b64a44368775ff9dd01569"} Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.180939 4941 scope.go:117] "RemoveContainer" containerID="873218d3f7dc48736dffb325ee97fa6c6384c1333a030509e1eea4b01d8b80d8" Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.181304 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.230758 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.267500 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.285293 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:20:27 crc kubenswrapper[4941]: E1130 08:20:27.285938 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="ceilometer-notification-agent" Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.285963 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="ceilometer-notification-agent" Nov 30 08:20:27 crc kubenswrapper[4941]: E1130 08:20:27.285990 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="ceilometer-central-agent" Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.285996 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="ceilometer-central-agent" Nov 30 08:20:27 crc kubenswrapper[4941]: E1130 08:20:27.286026 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="proxy-httpd" Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.286033 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="proxy-httpd" Nov 30 08:20:27 crc kubenswrapper[4941]: E1130 08:20:27.286045 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="sg-core" Nov 30 08:20:27 crc 
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.286259 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="proxy-httpd"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.286274 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="ceilometer-notification-agent"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.286283 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="ceilometer-central-agent"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.286291 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" containerName="sg-core"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.288371 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.293854 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.294055 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.310637 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.376239 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f357ee97-025b-4963-8332-d2c5ba303362-run-httpd\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.376313 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw8zf\" (UniqueName: \"kubernetes.io/projected/f357ee97-025b-4963-8332-d2c5ba303362-kube-api-access-sw8zf\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.376491 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-scripts\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.376559 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.376589 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-config-data\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.376620 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f357ee97-025b-4963-8332-d2c5ba303362-log-httpd\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.376720 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.479051 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.479159 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f357ee97-025b-4963-8332-d2c5ba303362-run-httpd\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.479192 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw8zf\" (UniqueName: \"kubernetes.io/projected/f357ee97-025b-4963-8332-d2c5ba303362-kube-api-access-sw8zf\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.479235 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-scripts\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.479261 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.479290 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-config-data\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.479358 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f357ee97-025b-4963-8332-d2c5ba303362-log-httpd\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.479942 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f357ee97-025b-4963-8332-d2c5ba303362-run-httpd\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.480761 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f357ee97-025b-4963-8332-d2c5ba303362-log-httpd\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.486545 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.486665 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-config-data\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.487877 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.488042 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-scripts\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.496069 4941 scope.go:117] "RemoveContainer" containerID="b3056f14b8e9cd3df34c78c461cf1e0d0eda35250717365f814129aad6e0bce1"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.505258 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw8zf\" (UniqueName: \"kubernetes.io/projected/f357ee97-025b-4963-8332-d2c5ba303362-kube-api-access-sw8zf\") pod \"ceilometer-0\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.533092 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dd7a347-dc57-4862-9ed0-468a49b4de16" path="/var/lib/kubelet/pods/7dd7a347-dc57-4862-9ed0-468a49b4de16/volumes"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.608909 4941 scope.go:117] "RemoveContainer" containerID="7f6cf6182447456efe170be4ed5e1e6733245a2a57cd1ea7c595890b235f204e"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.622733 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 30 08:20:27 crc kubenswrapper[4941]: I1130 08:20:27.647740 4941 scope.go:117] "RemoveContainer" containerID="a36172a6f0f2d6bceeb0b651abc451e0ff47ce78bc7aae3eaa1a76546b9fcee4"
Nov 30 08:20:28 crc kubenswrapper[4941]: I1130 08:20:28.190538 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 30 08:20:29 crc kubenswrapper[4941]: I1130 08:20:29.220296 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f357ee97-025b-4963-8332-d2c5ba303362","Type":"ContainerStarted","Data":"50ac0c93cbcd7eca02ec28b25b269f7a257d1aa9ff34230ce77410042d080302"}
Nov 30 08:20:29 crc kubenswrapper[4941]: I1130 08:20:29.220898 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f357ee97-025b-4963-8332-d2c5ba303362","Type":"ContainerStarted","Data":"6caf49ac800d31c774037f1dfb3fd17f6172d6613f841a0e92d842bdeff34f9a"}
Nov 30 08:20:29 crc kubenswrapper[4941]: I1130 08:20:29.220916 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f357ee97-025b-4963-8332-d2c5ba303362","Type":"ContainerStarted","Data":"6959e5adc79e6c20e20ddabeddbec36a864d824638b28b460011a4e0c009703e"}
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.094756 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-2dsp7"]
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.098283 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-2dsp7"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.106356 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-2dsp7"]
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.144312 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctgdg\" (UniqueName: \"kubernetes.io/projected/e0b24026-5088-4a66-ad1c-5088d4611867-kube-api-access-ctgdg\") pod \"manila-db-create-2dsp7\" (UID: \"e0b24026-5088-4a66-ad1c-5088d4611867\") " pod="openstack/manila-db-create-2dsp7"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.144389 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0b24026-5088-4a66-ad1c-5088d4611867-operator-scripts\") pod \"manila-db-create-2dsp7\" (UID: \"e0b24026-5088-4a66-ad1c-5088d4611867\") " pod="openstack/manila-db-create-2dsp7"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.201395 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-9c47-account-create-update-tfxnb"]
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.203318 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-9c47-account-create-update-tfxnb"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.211305 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.219643 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-9c47-account-create-update-tfxnb"]
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.253962 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctgdg\" (UniqueName: \"kubernetes.io/projected/e0b24026-5088-4a66-ad1c-5088d4611867-kube-api-access-ctgdg\") pod \"manila-db-create-2dsp7\" (UID: \"e0b24026-5088-4a66-ad1c-5088d4611867\") " pod="openstack/manila-db-create-2dsp7"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.254878 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0b24026-5088-4a66-ad1c-5088d4611867-operator-scripts\") pod \"manila-db-create-2dsp7\" (UID: \"e0b24026-5088-4a66-ad1c-5088d4611867\") " pod="openstack/manila-db-create-2dsp7"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.255040 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk9jx\" (UniqueName: \"kubernetes.io/projected/2bc4835a-9b82-4dcd-940d-24485fbb131c-kube-api-access-vk9jx\") pod \"manila-9c47-account-create-update-tfxnb\" (UID: \"2bc4835a-9b82-4dcd-940d-24485fbb131c\") " pod="openstack/manila-9c47-account-create-update-tfxnb"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.255166 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bc4835a-9b82-4dcd-940d-24485fbb131c-operator-scripts\") pod \"manila-9c47-account-create-update-tfxnb\" (UID: \"2bc4835a-9b82-4dcd-940d-24485fbb131c\") " pod="openstack/manila-9c47-account-create-update-tfxnb"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.255772 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f357ee97-025b-4963-8332-d2c5ba303362","Type":"ContainerStarted","Data":"189fa0978a6a8dff5b6c651a62d1f5878c4b2b905d89b308df415d7448fe8312"}
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.256167 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0b24026-5088-4a66-ad1c-5088d4611867-operator-scripts\") pod \"manila-db-create-2dsp7\" (UID: \"e0b24026-5088-4a66-ad1c-5088d4611867\") " pod="openstack/manila-db-create-2dsp7"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.278203 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctgdg\" (UniqueName: \"kubernetes.io/projected/e0b24026-5088-4a66-ad1c-5088d4611867-kube-api-access-ctgdg\") pod \"manila-db-create-2dsp7\" (UID: \"e0b24026-5088-4a66-ad1c-5088d4611867\") " pod="openstack/manila-db-create-2dsp7"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.356983 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vk9jx\" (UniqueName: \"kubernetes.io/projected/2bc4835a-9b82-4dcd-940d-24485fbb131c-kube-api-access-vk9jx\") pod \"manila-9c47-account-create-update-tfxnb\" (UID: \"2bc4835a-9b82-4dcd-940d-24485fbb131c\") " pod="openstack/manila-9c47-account-create-update-tfxnb"
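
[Annotation] Because every lifecycle transition in this log is a matched ContainerStarted/ContainerDied pair, the log itself is easy to mine. A hypothetical standalone helper (not part of the kubelet; reads a log like this one on stdin) that pairs the PLEG events and prints how long each container or sandbox ran:

    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
        "time"
    )

    // Matches the syslog timestamp plus the Type/Data fields of the
    // "SyncLoop (PLEG)" event lines; 64-hex sandbox IDs match too, which
    // is fine since sandboxes flow through the same events.
    var evRe = regexp.MustCompile(`^(\w+ \d+ \d+:\d+:\d+) .*"Type":"(ContainerStarted|ContainerDied)","Data":"([0-9a-f]{64})"`)

    func main() {
        started := map[string]time.Time{}
        sc := bufio.NewScanner(os.Stdin)
        sc.Buffer(make([]byte, 1<<20), 1<<20) // tolerate very long lines
        for sc.Scan() {
            m := evRe.FindStringSubmatch(sc.Text())
            if m == nil {
                continue
            }
            ts, err := time.Parse("Jan 2 15:04:05", m[1])
            if err != nil {
                continue
            }
            if m[2] == "ContainerStarted" {
                started[m[3]] = ts
            } else if t0, ok := started[m[3]]; ok {
                fmt.Printf("%s ran %s\n", m[3][:12], ts.Sub(t0))
            }
        }
    }

Run against this section it would report, for example, that manila-db-create's container 57f37f5a ran roughly one second (started 08:20:31, died by 08:20:32), consistent with the other one-shot database-create jobs in the log.
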
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.357068 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bc4835a-9b82-4dcd-940d-24485fbb131c-operator-scripts\") pod \"manila-9c47-account-create-update-tfxnb\" (UID: \"2bc4835a-9b82-4dcd-940d-24485fbb131c\") " pod="openstack/manila-9c47-account-create-update-tfxnb"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.357757 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bc4835a-9b82-4dcd-940d-24485fbb131c-operator-scripts\") pod \"manila-9c47-account-create-update-tfxnb\" (UID: \"2bc4835a-9b82-4dcd-940d-24485fbb131c\") " pod="openstack/manila-9c47-account-create-update-tfxnb"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.374904 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vk9jx\" (UniqueName: \"kubernetes.io/projected/2bc4835a-9b82-4dcd-940d-24485fbb131c-kube-api-access-vk9jx\") pod \"manila-9c47-account-create-update-tfxnb\" (UID: \"2bc4835a-9b82-4dcd-940d-24485fbb131c\") " pod="openstack/manila-9c47-account-create-update-tfxnb"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.444137 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-2dsp7"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.521516 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-9c47-account-create-update-tfxnb"
Nov 30 08:20:30 crc kubenswrapper[4941]: I1130 08:20:30.938237 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-2dsp7"]
Nov 30 08:20:30 crc kubenswrapper[4941]: W1130 08:20:30.942319 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0b24026_5088_4a66_ad1c_5088d4611867.slice/crio-24718f5d8182bee45ed85cb8f3222b02df2068817fc8f84ef64f880d9b18c1a9 WatchSource:0}: Error finding container 24718f5d8182bee45ed85cb8f3222b02df2068817fc8f84ef64f880d9b18c1a9: Status 404 returned error can't find the container with id 24718f5d8182bee45ed85cb8f3222b02df2068817fc8f84ef64f880d9b18c1a9
Nov 30 08:20:31 crc kubenswrapper[4941]: I1130 08:20:31.095573 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-9c47-account-create-update-tfxnb"]
Nov 30 08:20:31 crc kubenswrapper[4941]: W1130 08:20:31.117892 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bc4835a_9b82_4dcd_940d_24485fbb131c.slice/crio-c248fea96cf7597d9c83462089760a01044c63f4de7b9e4c16e65de1c903fcd2 WatchSource:0}: Error finding container c248fea96cf7597d9c83462089760a01044c63f4de7b9e4c16e65de1c903fcd2: Status 404 returned error can't find the container with id c248fea96cf7597d9c83462089760a01044c63f4de7b9e4c16e65de1c903fcd2
Nov 30 08:20:31 crc kubenswrapper[4941]: I1130 08:20:31.265898 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-9c47-account-create-update-tfxnb" event={"ID":"2bc4835a-9b82-4dcd-940d-24485fbb131c","Type":"ContainerStarted","Data":"c248fea96cf7597d9c83462089760a01044c63f4de7b9e4c16e65de1c903fcd2"}
Nov 30 08:20:31 crc kubenswrapper[4941]: I1130 08:20:31.267487 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-2dsp7" event={"ID":"e0b24026-5088-4a66-ad1c-5088d4611867","Type":"ContainerStarted","Data":"57f37f5a5f1b34f839c76bb40cc8911dfe9aa9164c0fe40c4516474b6db36d8d"}
Nov 30 08:20:31 crc kubenswrapper[4941]: I1130 08:20:31.267520 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-2dsp7" event={"ID":"e0b24026-5088-4a66-ad1c-5088d4611867","Type":"ContainerStarted","Data":"24718f5d8182bee45ed85cb8f3222b02df2068817fc8f84ef64f880d9b18c1a9"}
Nov 30 08:20:31 crc kubenswrapper[4941]: I1130 08:20:31.292086 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-create-2dsp7" podStartSLOduration=1.29206381 podStartE2EDuration="1.29206381s" podCreationTimestamp="2025-11-30 08:20:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:20:31.281854774 +0000 UTC m=+5652.050026403" watchObservedRunningTime="2025-11-30 08:20:31.29206381 +0000 UTC m=+5652.060235419"
Nov 30 08:20:32 crc kubenswrapper[4941]: I1130 08:20:32.299239 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f357ee97-025b-4963-8332-d2c5ba303362","Type":"ContainerStarted","Data":"2e7aae57e54dfa2140547023fc663d83f72da68597b1135770862e6992a01a5c"}
Nov 30 08:20:32 crc kubenswrapper[4941]: I1130 08:20:32.300820 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Nov 30 08:20:32 crc kubenswrapper[4941]: I1130 08:20:32.303587 4941 generic.go:334] "Generic (PLEG): container finished" podID="2bc4835a-9b82-4dcd-940d-24485fbb131c" containerID="e5a1913aeb7fccf12c6b2321ab6c8807beb3e3838d3bd13b1d979b8bc1dc10a8" exitCode=0
Nov 30 08:20:32 crc kubenswrapper[4941]: I1130 08:20:32.303706 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-9c47-account-create-update-tfxnb" event={"ID":"2bc4835a-9b82-4dcd-940d-24485fbb131c","Type":"ContainerDied","Data":"e5a1913aeb7fccf12c6b2321ab6c8807beb3e3838d3bd13b1d979b8bc1dc10a8"}
Nov 30 08:20:32 crc kubenswrapper[4941]: I1130 08:20:32.306516 4941 generic.go:334] "Generic (PLEG): container finished" podID="e0b24026-5088-4a66-ad1c-5088d4611867" containerID="57f37f5a5f1b34f839c76bb40cc8911dfe9aa9164c0fe40c4516474b6db36d8d" exitCode=0
Nov 30 08:20:32 crc kubenswrapper[4941]: I1130 08:20:32.306561 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-2dsp7" event={"ID":"e0b24026-5088-4a66-ad1c-5088d4611867","Type":"ContainerDied","Data":"57f37f5a5f1b34f839c76bb40cc8911dfe9aa9164c0fe40c4516474b6db36d8d"}
Nov 30 08:20:32 crc kubenswrapper[4941]: I1130 08:20:32.351290 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.347436333 podStartE2EDuration="5.351263513s" podCreationTimestamp="2025-11-30 08:20:27 +0000 UTC" firstStartedPulling="2025-11-30 08:20:28.204056774 +0000 UTC m=+5648.972228383" lastFinishedPulling="2025-11-30 08:20:31.207883954 +0000 UTC m=+5651.976055563" observedRunningTime="2025-11-30 08:20:32.33664863 +0000 UTC m=+5653.104820229" watchObservedRunningTime="2025-11-30 08:20:32.351263513 +0000 UTC m=+5653.119435122"
Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.011146 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-2dsp7"
Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.115558 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-9c47-account-create-update-tfxnb"
Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.167788 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctgdg\" (UniqueName: \"kubernetes.io/projected/e0b24026-5088-4a66-ad1c-5088d4611867-kube-api-access-ctgdg\") pod \"e0b24026-5088-4a66-ad1c-5088d4611867\" (UID: \"e0b24026-5088-4a66-ad1c-5088d4611867\") "
Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.167958 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0b24026-5088-4a66-ad1c-5088d4611867-operator-scripts\") pod \"e0b24026-5088-4a66-ad1c-5088d4611867\" (UID: \"e0b24026-5088-4a66-ad1c-5088d4611867\") "
Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.168433 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0b24026-5088-4a66-ad1c-5088d4611867-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e0b24026-5088-4a66-ad1c-5088d4611867" (UID: "e0b24026-5088-4a66-ad1c-5088d4611867"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.168617 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0b24026-5088-4a66-ad1c-5088d4611867-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.185116 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0b24026-5088-4a66-ad1c-5088d4611867-kube-api-access-ctgdg" (OuterVolumeSpecName: "kube-api-access-ctgdg") pod "e0b24026-5088-4a66-ad1c-5088d4611867" (UID: "e0b24026-5088-4a66-ad1c-5088d4611867"). InnerVolumeSpecName "kube-api-access-ctgdg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.270133 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bc4835a-9b82-4dcd-940d-24485fbb131c-operator-scripts\") pod \"2bc4835a-9b82-4dcd-940d-24485fbb131c\" (UID: \"2bc4835a-9b82-4dcd-940d-24485fbb131c\") "
Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.270204 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vk9jx\" (UniqueName: \"kubernetes.io/projected/2bc4835a-9b82-4dcd-940d-24485fbb131c-kube-api-access-vk9jx\") pod \"2bc4835a-9b82-4dcd-940d-24485fbb131c\" (UID: \"2bc4835a-9b82-4dcd-940d-24485fbb131c\") "
Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.270886 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctgdg\" (UniqueName: \"kubernetes.io/projected/e0b24026-5088-4a66-ad1c-5088d4611867-kube-api-access-ctgdg\") on node \"crc\" DevicePath \"\""
Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.270934 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bc4835a-9b82-4dcd-940d-24485fbb131c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2bc4835a-9b82-4dcd-940d-24485fbb131c" (UID: "2bc4835a-9b82-4dcd-940d-24485fbb131c"). InnerVolumeSpecName "operator-scripts".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.275158 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bc4835a-9b82-4dcd-940d-24485fbb131c-kube-api-access-vk9jx" (OuterVolumeSpecName: "kube-api-access-vk9jx") pod "2bc4835a-9b82-4dcd-940d-24485fbb131c" (UID: "2bc4835a-9b82-4dcd-940d-24485fbb131c"). InnerVolumeSpecName "kube-api-access-vk9jx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.332094 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-9c47-account-create-update-tfxnb" Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.332083 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-9c47-account-create-update-tfxnb" event={"ID":"2bc4835a-9b82-4dcd-940d-24485fbb131c","Type":"ContainerDied","Data":"c248fea96cf7597d9c83462089760a01044c63f4de7b9e4c16e65de1c903fcd2"} Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.332212 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c248fea96cf7597d9c83462089760a01044c63f4de7b9e4c16e65de1c903fcd2" Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.334697 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-2dsp7" event={"ID":"e0b24026-5088-4a66-ad1c-5088d4611867","Type":"ContainerDied","Data":"24718f5d8182bee45ed85cb8f3222b02df2068817fc8f84ef64f880d9b18c1a9"} Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.334743 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24718f5d8182bee45ed85cb8f3222b02df2068817fc8f84ef64f880d9b18c1a9" Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.334773 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-2dsp7" Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.372807 4941 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bc4835a-9b82-4dcd-940d-24485fbb131c-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:34 crc kubenswrapper[4941]: I1130 08:20:34.372850 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vk9jx\" (UniqueName: \"kubernetes.io/projected/2bc4835a-9b82-4dcd-940d-24485fbb131c-kube-api-access-vk9jx\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.620660 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-d9gzb"] Nov 30 08:20:35 crc kubenswrapper[4941]: E1130 08:20:35.621599 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bc4835a-9b82-4dcd-940d-24485fbb131c" containerName="mariadb-account-create-update" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.621614 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bc4835a-9b82-4dcd-940d-24485fbb131c" containerName="mariadb-account-create-update" Nov 30 08:20:35 crc kubenswrapper[4941]: E1130 08:20:35.621637 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b24026-5088-4a66-ad1c-5088d4611867" containerName="mariadb-database-create" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.621643 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b24026-5088-4a66-ad1c-5088d4611867" containerName="mariadb-database-create" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.621844 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b24026-5088-4a66-ad1c-5088d4611867" containerName="mariadb-database-create" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.621873 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bc4835a-9b82-4dcd-940d-24485fbb131c" containerName="mariadb-account-create-update" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.622968 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.626187 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-88xh7" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.626563 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.631393 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-d9gzb"] Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.703496 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-config-data\") pod \"manila-db-sync-d9gzb\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.704013 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-job-config-data\") pod \"manila-db-sync-d9gzb\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.704141 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqdzd\" (UniqueName: \"kubernetes.io/projected/7294eefc-5174-4c48-8e7f-c52377ded802-kube-api-access-tqdzd\") pod \"manila-db-sync-d9gzb\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.704262 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-combined-ca-bundle\") pod \"manila-db-sync-d9gzb\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.806974 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-job-config-data\") pod \"manila-db-sync-d9gzb\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.807033 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqdzd\" (UniqueName: \"kubernetes.io/projected/7294eefc-5174-4c48-8e7f-c52377ded802-kube-api-access-tqdzd\") pod \"manila-db-sync-d9gzb\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.807074 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-combined-ca-bundle\") pod \"manila-db-sync-d9gzb\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.810298 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-config-data\") pod \"manila-db-sync-d9gzb\" (UID: 
\"7294eefc-5174-4c48-8e7f-c52377ded802\") " pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.814693 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-job-config-data\") pod \"manila-db-sync-d9gzb\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.814920 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-combined-ca-bundle\") pod \"manila-db-sync-d9gzb\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.828598 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqdzd\" (UniqueName: \"kubernetes.io/projected/7294eefc-5174-4c48-8e7f-c52377ded802-kube-api-access-tqdzd\") pod \"manila-db-sync-d9gzb\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.829120 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-config-data\") pod \"manila-db-sync-d9gzb\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:35 crc kubenswrapper[4941]: I1130 08:20:35.983648 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:36 crc kubenswrapper[4941]: I1130 08:20:36.762164 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-d9gzb"] Nov 30 08:20:37 crc kubenswrapper[4941]: I1130 08:20:37.364720 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-d9gzb" event={"ID":"7294eefc-5174-4c48-8e7f-c52377ded802","Type":"ContainerStarted","Data":"b67ff237d967e354e726bae5b1951ad8d851ab71a95528a972fdfb616682fb90"} Nov 30 08:20:42 crc kubenswrapper[4941]: I1130 08:20:42.419202 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-d9gzb" event={"ID":"7294eefc-5174-4c48-8e7f-c52377ded802","Type":"ContainerStarted","Data":"2dbadf78e233efbb05b40fe18411f4c678a011e4fa77c655a1a4e1e34fd5f8c6"} Nov 30 08:20:42 crc kubenswrapper[4941]: I1130 08:20:42.442240 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-d9gzb" podStartSLOduration=2.655930096 podStartE2EDuration="7.442215344s" podCreationTimestamp="2025-11-30 08:20:35 +0000 UTC" firstStartedPulling="2025-11-30 08:20:36.74980722 +0000 UTC m=+5657.517978829" lastFinishedPulling="2025-11-30 08:20:41.536092468 +0000 UTC m=+5662.304264077" observedRunningTime="2025-11-30 08:20:42.434111582 +0000 UTC m=+5663.202283211" watchObservedRunningTime="2025-11-30 08:20:42.442215344 +0000 UTC m=+5663.210386953" Nov 30 08:20:44 crc kubenswrapper[4941]: I1130 08:20:44.447320 4941 generic.go:334] "Generic (PLEG): container finished" podID="7294eefc-5174-4c48-8e7f-c52377ded802" containerID="2dbadf78e233efbb05b40fe18411f4c678a011e4fa77c655a1a4e1e34fd5f8c6" exitCode=0 Nov 30 08:20:44 crc kubenswrapper[4941]: I1130 08:20:44.447383 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-d9gzb" 
event={"ID":"7294eefc-5174-4c48-8e7f-c52377ded802","Type":"ContainerDied","Data":"2dbadf78e233efbb05b40fe18411f4c678a011e4fa77c655a1a4e1e34fd5f8c6"} Nov 30 08:20:45 crc kubenswrapper[4941]: I1130 08:20:45.953758 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.065139 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-config-data\") pod \"7294eefc-5174-4c48-8e7f-c52377ded802\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.065663 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-combined-ca-bundle\") pod \"7294eefc-5174-4c48-8e7f-c52377ded802\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.065989 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqdzd\" (UniqueName: \"kubernetes.io/projected/7294eefc-5174-4c48-8e7f-c52377ded802-kube-api-access-tqdzd\") pod \"7294eefc-5174-4c48-8e7f-c52377ded802\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.066020 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-job-config-data\") pod \"7294eefc-5174-4c48-8e7f-c52377ded802\" (UID: \"7294eefc-5174-4c48-8e7f-c52377ded802\") " Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.070739 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7294eefc-5174-4c48-8e7f-c52377ded802-kube-api-access-tqdzd" (OuterVolumeSpecName: "kube-api-access-tqdzd") pod "7294eefc-5174-4c48-8e7f-c52377ded802" (UID: "7294eefc-5174-4c48-8e7f-c52377ded802"). InnerVolumeSpecName "kube-api-access-tqdzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.073008 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "7294eefc-5174-4c48-8e7f-c52377ded802" (UID: "7294eefc-5174-4c48-8e7f-c52377ded802"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.073693 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-config-data" (OuterVolumeSpecName: "config-data") pod "7294eefc-5174-4c48-8e7f-c52377ded802" (UID: "7294eefc-5174-4c48-8e7f-c52377ded802"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.093555 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7294eefc-5174-4c48-8e7f-c52377ded802" (UID: "7294eefc-5174-4c48-8e7f-c52377ded802"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.167884 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.167911 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.167928 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqdzd\" (UniqueName: \"kubernetes.io/projected/7294eefc-5174-4c48-8e7f-c52377ded802-kube-api-access-tqdzd\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.167943 4941 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/7294eefc-5174-4c48-8e7f-c52377ded802-job-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.475434 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-d9gzb" event={"ID":"7294eefc-5174-4c48-8e7f-c52377ded802","Type":"ContainerDied","Data":"b67ff237d967e354e726bae5b1951ad8d851ab71a95528a972fdfb616682fb90"} Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.475476 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b67ff237d967e354e726bae5b1951ad8d851ab71a95528a972fdfb616682fb90" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.475632 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-d9gzb" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.865820 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Nov 30 08:20:46 crc kubenswrapper[4941]: E1130 08:20:46.866710 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7294eefc-5174-4c48-8e7f-c52377ded802" containerName="manila-db-sync" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.866735 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7294eefc-5174-4c48-8e7f-c52377ded802" containerName="manila-db-sync" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.867007 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7294eefc-5174-4c48-8e7f-c52377ded802" containerName="manila-db-sync" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.868543 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.871453 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.871640 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.871903 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-88xh7" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.872069 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.884060 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.885509 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/272642ef-9daa-4958-9645-ac66bdd43cce-scripts\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.885696 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/272642ef-9daa-4958-9645-ac66bdd43cce-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.885861 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/272642ef-9daa-4958-9645-ac66bdd43cce-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.886033 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ghtj\" (UniqueName: \"kubernetes.io/projected/272642ef-9daa-4958-9645-ac66bdd43cce-kube-api-access-6ghtj\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.886170 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272642ef-9daa-4958-9645-ac66bdd43cce-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.886345 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272642ef-9daa-4958-9645-ac66bdd43cce-config-data\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.891646 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.898299 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.924671 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.950029 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987168 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272642ef-9daa-4958-9645-ac66bdd43cce-config-data\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987233 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965722c8-5980-41b0-92e7-ebca41c408e2-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987257 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/965722c8-5980-41b0-92e7-ebca41c408e2-ceph\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987280 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/965722c8-5980-41b0-92e7-ebca41c408e2-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987305 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/965722c8-5980-41b0-92e7-ebca41c408e2-scripts\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987361 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/272642ef-9daa-4958-9645-ac66bdd43cce-scripts\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987377 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/272642ef-9daa-4958-9645-ac66bdd43cce-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987399 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/965722c8-5980-41b0-92e7-ebca41c408e2-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " 
pod="openstack/manila-share-share1-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987422 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/965722c8-5980-41b0-92e7-ebca41c408e2-config-data\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987455 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/272642ef-9daa-4958-9645-ac66bdd43cce-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987490 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/965722c8-5980-41b0-92e7-ebca41c408e2-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987515 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ghtj\" (UniqueName: \"kubernetes.io/projected/272642ef-9daa-4958-9645-ac66bdd43cce-kube-api-access-6ghtj\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987572 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4npss\" (UniqueName: \"kubernetes.io/projected/965722c8-5980-41b0-92e7-ebca41c408e2-kube-api-access-4npss\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.987593 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272642ef-9daa-4958-9645-ac66bdd43cce-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.988772 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/272642ef-9daa-4958-9645-ac66bdd43cce-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:46 crc kubenswrapper[4941]: I1130 08:20:46.992237 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/272642ef-9daa-4958-9645-ac66bdd43cce-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:46.999994 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/272642ef-9daa-4958-9645-ac66bdd43cce-scripts\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.000240 4941 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/272642ef-9daa-4958-9645-ac66bdd43cce-config-data\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.003951 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/272642ef-9daa-4958-9645-ac66bdd43cce-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.029001 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f54977c-kbp9d"] Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.031028 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ghtj\" (UniqueName: \"kubernetes.io/projected/272642ef-9daa-4958-9645-ac66bdd43cce-kube-api-access-6ghtj\") pod \"manila-scheduler-0\" (UID: \"272642ef-9daa-4958-9645-ac66bdd43cce\") " pod="openstack/manila-scheduler-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.031291 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.048441 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f54977c-kbp9d"] Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.089446 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4npss\" (UniqueName: \"kubernetes.io/projected/965722c8-5980-41b0-92e7-ebca41c408e2-kube-api-access-4npss\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.089516 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-ovsdbserver-sb\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.089554 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67gvm\" (UniqueName: \"kubernetes.io/projected/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-kube-api-access-67gvm\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.089577 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965722c8-5980-41b0-92e7-ebca41c408e2-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.089601 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-dns-svc\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.089620 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/965722c8-5980-41b0-92e7-ebca41c408e2-ceph\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.089643 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/965722c8-5980-41b0-92e7-ebca41c408e2-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.089676 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/965722c8-5980-41b0-92e7-ebca41c408e2-scripts\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.089738 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/965722c8-5980-41b0-92e7-ebca41c408e2-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.089761 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/965722c8-5980-41b0-92e7-ebca41c408e2-config-data\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.089805 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-config\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.090313 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/965722c8-5980-41b0-92e7-ebca41c408e2-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.090478 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/965722c8-5980-41b0-92e7-ebca41c408e2-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.091096 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/965722c8-5980-41b0-92e7-ebca41c408e2-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.091126 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-ovsdbserver-nb\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: 
\"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.092932 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/965722c8-5980-41b0-92e7-ebca41c408e2-scripts\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.094057 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/965722c8-5980-41b0-92e7-ebca41c408e2-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.094782 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/965722c8-5980-41b0-92e7-ebca41c408e2-config-data\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.095191 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/965722c8-5980-41b0-92e7-ebca41c408e2-ceph\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.097893 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/965722c8-5980-41b0-92e7-ebca41c408e2-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.112970 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4npss\" (UniqueName: \"kubernetes.io/projected/965722c8-5980-41b0-92e7-ebca41c408e2-kube-api-access-4npss\") pod \"manila-share-share1-0\" (UID: \"965722c8-5980-41b0-92e7-ebca41c408e2\") " pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.200209 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.203434 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67gvm\" (UniqueName: \"kubernetes.io/projected/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-kube-api-access-67gvm\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.203481 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-dns-svc\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.203674 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-config\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.203714 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-ovsdbserver-nb\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.203790 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-ovsdbserver-sb\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.204820 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-ovsdbserver-sb\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.205430 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-config\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.205604 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-ovsdbserver-nb\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.206553 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-dns-svc\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.221440 4941 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/manila-api-0"] Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.221877 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.224651 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.238619 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.252487 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.253416 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67gvm\" (UniqueName: \"kubernetes.io/projected/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-kube-api-access-67gvm\") pod \"dnsmasq-dns-f54977c-kbp9d\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.305717 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33eeedb0-e517-4da2-a25e-0faa7c669fde-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.306678 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33eeedb0-e517-4da2-a25e-0faa7c669fde-logs\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.307031 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33eeedb0-e517-4da2-a25e-0faa7c669fde-config-data\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.307137 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33eeedb0-e517-4da2-a25e-0faa7c669fde-scripts\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.307258 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/33eeedb0-e517-4da2-a25e-0faa7c669fde-etc-machine-id\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.307438 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fg6f\" (UniqueName: \"kubernetes.io/projected/33eeedb0-e517-4da2-a25e-0faa7c669fde-kube-api-access-6fg6f\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.307567 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/33eeedb0-e517-4da2-a25e-0faa7c669fde-config-data-custom\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.409593 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33eeedb0-e517-4da2-a25e-0faa7c669fde-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.409681 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33eeedb0-e517-4da2-a25e-0faa7c669fde-logs\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.409707 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33eeedb0-e517-4da2-a25e-0faa7c669fde-config-data\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.409732 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33eeedb0-e517-4da2-a25e-0faa7c669fde-scripts\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.409776 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/33eeedb0-e517-4da2-a25e-0faa7c669fde-etc-machine-id\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.409812 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fg6f\" (UniqueName: \"kubernetes.io/projected/33eeedb0-e517-4da2-a25e-0faa7c669fde-kube-api-access-6fg6f\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.409833 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/33eeedb0-e517-4da2-a25e-0faa7c669fde-config-data-custom\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.411572 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/33eeedb0-e517-4da2-a25e-0faa7c669fde-etc-machine-id\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.412633 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33eeedb0-e517-4da2-a25e-0faa7c669fde-logs\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.417362 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/33eeedb0-e517-4da2-a25e-0faa7c669fde-scripts\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.421173 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33eeedb0-e517-4da2-a25e-0faa7c669fde-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.421940 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/33eeedb0-e517-4da2-a25e-0faa7c669fde-config-data-custom\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.427071 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33eeedb0-e517-4da2-a25e-0faa7c669fde-config-data\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.430389 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fg6f\" (UniqueName: \"kubernetes.io/projected/33eeedb0-e517-4da2-a25e-0faa7c669fde-kube-api-access-6fg6f\") pod \"manila-api-0\" (UID: \"33eeedb0-e517-4da2-a25e-0faa7c669fde\") " pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.508087 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.604299 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Nov 30 08:20:47 crc kubenswrapper[4941]: I1130 08:20:47.798931 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Nov 30 08:20:47 crc kubenswrapper[4941]: W1130 08:20:47.826583 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod272642ef_9daa_4958_9645_ac66bdd43cce.slice/crio-218788739d56830f1b87a075d53b41aa14a67911fb62ba317afaf7bf9360d692 WatchSource:0}: Error finding container 218788739d56830f1b87a075d53b41aa14a67911fb62ba317afaf7bf9360d692: Status 404 returned error can't find the container with id 218788739d56830f1b87a075d53b41aa14a67911fb62ba317afaf7bf9360d692 Nov 30 08:20:48 crc kubenswrapper[4941]: I1130 08:20:48.098056 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Nov 30 08:20:48 crc kubenswrapper[4941]: W1130 08:20:48.159613 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod965722c8_5980_41b0_92e7_ebca41c408e2.slice/crio-b23b5e604986e21bf987871f0b05230730a79909c4dc25cdda03624ede1d588d WatchSource:0}: Error finding container b23b5e604986e21bf987871f0b05230730a79909c4dc25cdda03624ede1d588d: Status 404 returned error can't find the container with id b23b5e604986e21bf987871f0b05230730a79909c4dc25cdda03624ede1d588d Nov 30 08:20:48 crc kubenswrapper[4941]: I1130 08:20:48.180994 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Nov 30 08:20:48 crc kubenswrapper[4941]: I1130 08:20:48.235220 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f54977c-kbp9d"] Nov 30 08:20:48 crc kubenswrapper[4941]: I1130 08:20:48.529470 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f54977c-kbp9d" event={"ID":"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1","Type":"ContainerStarted","Data":"a774a55184ed4d6906f7b8bf62068c02b0ebfe55eb6ab094a257fd7fb6db6560"} Nov 30 08:20:48 crc kubenswrapper[4941]: I1130 08:20:48.533487 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"965722c8-5980-41b0-92e7-ebca41c408e2","Type":"ContainerStarted","Data":"b23b5e604986e21bf987871f0b05230730a79909c4dc25cdda03624ede1d588d"} Nov 30 08:20:48 crc kubenswrapper[4941]: I1130 08:20:48.539662 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"33eeedb0-e517-4da2-a25e-0faa7c669fde","Type":"ContainerStarted","Data":"e98cc9cb88bda8e1c42797491f1dc71a338f5f10d712ed19570f51dcd1bcd72a"} Nov 30 08:20:48 crc kubenswrapper[4941]: I1130 08:20:48.543817 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"272642ef-9daa-4958-9645-ac66bdd43cce","Type":"ContainerStarted","Data":"218788739d56830f1b87a075d53b41aa14a67911fb62ba317afaf7bf9360d692"} Nov 30 08:20:49 crc kubenswrapper[4941]: I1130 08:20:49.120743 4941 scope.go:117] "RemoveContainer" containerID="9f512dc3396d04a9f7780fbb3233f19736fbf4746f61d33480ef85a32c2b6ed5" Nov 30 08:20:49 crc kubenswrapper[4941]: I1130 08:20:49.188029 4941 scope.go:117] "RemoveContainer" containerID="cd5d2234b051f881222cc935693c7cc015798ac4a998b0b0ca4be7af0628d931" Nov 30 08:20:49 crc kubenswrapper[4941]: I1130 08:20:49.250230 4941 scope.go:117] "RemoveContainer" containerID="5e2a316df35969303600a64df9dd4bb42597798100a3c671963edba1622a719f" Nov 30 08:20:49 crc 
kubenswrapper[4941]: I1130 08:20:49.568342 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"272642ef-9daa-4958-9645-ac66bdd43cce","Type":"ContainerStarted","Data":"907bf7267cd2b62704d2caefaafd7f9764395c0bf59d6ecb081faa7d19a0ca2d"} Nov 30 08:20:49 crc kubenswrapper[4941]: I1130 08:20:49.568812 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"272642ef-9daa-4958-9645-ac66bdd43cce","Type":"ContainerStarted","Data":"f5ce59412754db69ef8dd94de4f28c395059f015ae73b9315f0c35913690c144"} Nov 30 08:20:49 crc kubenswrapper[4941]: I1130 08:20:49.571979 4941 generic.go:334] "Generic (PLEG): container finished" podID="05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" containerID="95f57afa8092b4380f0ce947cb61dc00c885c915c417a99a3769654cb2a17239" exitCode=0 Nov 30 08:20:49 crc kubenswrapper[4941]: I1130 08:20:49.572072 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f54977c-kbp9d" event={"ID":"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1","Type":"ContainerDied","Data":"95f57afa8092b4380f0ce947cb61dc00c885c915c417a99a3769654cb2a17239"} Nov 30 08:20:49 crc kubenswrapper[4941]: I1130 08:20:49.585541 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"33eeedb0-e517-4da2-a25e-0faa7c669fde","Type":"ContainerStarted","Data":"ab4eb21e2da5cfa341e5d449a3d1a68ee13b51bd6e1a57f23c14efcddced3fe8"} Nov 30 08:20:49 crc kubenswrapper[4941]: I1130 08:20:49.585605 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"33eeedb0-e517-4da2-a25e-0faa7c669fde","Type":"ContainerStarted","Data":"4d1adaaa934c29a4c9837f3950f171716d7ae78739d0e21ee471d164db0998ae"} Nov 30 08:20:49 crc kubenswrapper[4941]: I1130 08:20:49.586442 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Nov 30 08:20:49 crc kubenswrapper[4941]: I1130 08:20:49.656629 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=2.6566037319999998 podStartE2EDuration="2.656603732s" podCreationTimestamp="2025-11-30 08:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:20:49.64071717 +0000 UTC m=+5670.408888779" watchObservedRunningTime="2025-11-30 08:20:49.656603732 +0000 UTC m=+5670.424775341" Nov 30 08:20:49 crc kubenswrapper[4941]: I1130 08:20:49.701200 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.319213659 podStartE2EDuration="3.701175992s" podCreationTimestamp="2025-11-30 08:20:46 +0000 UTC" firstStartedPulling="2025-11-30 08:20:47.829515312 +0000 UTC m=+5668.597686921" lastFinishedPulling="2025-11-30 08:20:48.211477645 +0000 UTC m=+5668.979649254" observedRunningTime="2025-11-30 08:20:49.661780212 +0000 UTC m=+5670.429951821" watchObservedRunningTime="2025-11-30 08:20:49.701175992 +0000 UTC m=+5670.469347601" Nov 30 08:20:50 crc kubenswrapper[4941]: I1130 08:20:50.629297 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f54977c-kbp9d" event={"ID":"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1","Type":"ContainerStarted","Data":"05e4f23f35e4646ff2766557762474d1d69aece815a1a6ca444357c84bd883de"} Nov 30 08:20:50 crc kubenswrapper[4941]: I1130 08:20:50.630847 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:50 crc kubenswrapper[4941]: I1130 08:20:50.664935 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f54977c-kbp9d" podStartSLOduration=4.664909379 podStartE2EDuration="4.664909379s" podCreationTimestamp="2025-11-30 08:20:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:20:50.66268594 +0000 UTC m=+5671.430857539" watchObservedRunningTime="2025-11-30 08:20:50.664909379 +0000 UTC m=+5671.433080988" Nov 30 08:20:52 crc kubenswrapper[4941]: I1130 08:20:52.183376 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:20:52 crc kubenswrapper[4941]: I1130 08:20:52.184061 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="ceilometer-central-agent" containerID="cri-o://6caf49ac800d31c774037f1dfb3fd17f6172d6613f841a0e92d842bdeff34f9a" gracePeriod=30 Nov 30 08:20:52 crc kubenswrapper[4941]: I1130 08:20:52.184467 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="sg-core" containerID="cri-o://189fa0978a6a8dff5b6c651a62d1f5878c4b2b905d89b308df415d7448fe8312" gracePeriod=30 Nov 30 08:20:52 crc kubenswrapper[4941]: I1130 08:20:52.184538 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="ceilometer-notification-agent" containerID="cri-o://50ac0c93cbcd7eca02ec28b25b269f7a257d1aa9ff34230ce77410042d080302" gracePeriod=30 Nov 30 08:20:52 crc kubenswrapper[4941]: I1130 08:20:52.184600 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="proxy-httpd" containerID="cri-o://2e7aae57e54dfa2140547023fc663d83f72da68597b1135770862e6992a01a5c" gracePeriod=30 Nov 30 08:20:52 crc kubenswrapper[4941]: I1130 08:20:52.207068 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Nov 30 08:20:52 crc kubenswrapper[4941]: I1130 08:20:52.665831 4941 generic.go:334] "Generic (PLEG): container finished" podID="f357ee97-025b-4963-8332-d2c5ba303362" containerID="2e7aae57e54dfa2140547023fc663d83f72da68597b1135770862e6992a01a5c" exitCode=0 Nov 30 08:20:52 crc kubenswrapper[4941]: I1130 08:20:52.666407 4941 generic.go:334] "Generic (PLEG): container finished" podID="f357ee97-025b-4963-8332-d2c5ba303362" containerID="189fa0978a6a8dff5b6c651a62d1f5878c4b2b905d89b308df415d7448fe8312" exitCode=2 Nov 30 08:20:52 crc kubenswrapper[4941]: I1130 08:20:52.666481 4941 generic.go:334] "Generic (PLEG): container finished" podID="f357ee97-025b-4963-8332-d2c5ba303362" containerID="6caf49ac800d31c774037f1dfb3fd17f6172d6613f841a0e92d842bdeff34f9a" exitCode=0 Nov 30 08:20:52 crc kubenswrapper[4941]: I1130 08:20:52.666066 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f357ee97-025b-4963-8332-d2c5ba303362","Type":"ContainerDied","Data":"2e7aae57e54dfa2140547023fc663d83f72da68597b1135770862e6992a01a5c"} Nov 30 08:20:52 crc kubenswrapper[4941]: 
I1130 08:20:52.666626 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f357ee97-025b-4963-8332-d2c5ba303362","Type":"ContainerDied","Data":"189fa0978a6a8dff5b6c651a62d1f5878c4b2b905d89b308df415d7448fe8312"} Nov 30 08:20:52 crc kubenswrapper[4941]: I1130 08:20:52.666708 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f357ee97-025b-4963-8332-d2c5ba303362","Type":"ContainerDied","Data":"6caf49ac800d31c774037f1dfb3fd17f6172d6613f841a0e92d842bdeff34f9a"} Nov 30 08:20:55 crc kubenswrapper[4941]: I1130 08:20:55.708250 4941 generic.go:334] "Generic (PLEG): container finished" podID="f357ee97-025b-4963-8332-d2c5ba303362" containerID="50ac0c93cbcd7eca02ec28b25b269f7a257d1aa9ff34230ce77410042d080302" exitCode=0 Nov 30 08:20:55 crc kubenswrapper[4941]: I1130 08:20:55.708310 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f357ee97-025b-4963-8332-d2c5ba303362","Type":"ContainerDied","Data":"50ac0c93cbcd7eca02ec28b25b269f7a257d1aa9ff34230ce77410042d080302"} Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.648976 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.708540 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f357ee97-025b-4963-8332-d2c5ba303362-run-httpd\") pod \"f357ee97-025b-4963-8332-d2c5ba303362\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.709341 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f357ee97-025b-4963-8332-d2c5ba303362-log-httpd\") pod \"f357ee97-025b-4963-8332-d2c5ba303362\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.709035 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f357ee97-025b-4963-8332-d2c5ba303362-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f357ee97-025b-4963-8332-d2c5ba303362" (UID: "f357ee97-025b-4963-8332-d2c5ba303362"). InnerVolumeSpecName "run-httpd". 
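The pod_startup_latency_tracker entries above follow a simple arithmetic: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling) from that; when the pull timestamps are the zero value 0001-01-01, no image was pulled and the two durations coincide, as for manila-api-0. A minimal Go sketch reproducing the manila-scheduler-0 figures from the quoted timestamps (the parse layout is an assumption about the printed format):

package main

import (
	"fmt"
	"time"
)

// mustParse reads timestamps in the format the kubelet prints above.
func mustParse(s string) time.Time {
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-11-30 08:20:46 +0000 UTC")
	running := mustParse("2025-11-30 08:20:49.701175992 +0000 UTC")
	pullStart := mustParse("2025-11-30 08:20:47.829515312 +0000 UTC")
	pullEnd := mustParse("2025-11-30 08:20:48.211477645 +0000 UTC")

	e2e := running.Sub(created)         // podStartE2EDuration
	slo := e2e - pullEnd.Sub(pullStart) // podStartSLOduration
	fmt.Println(e2e, slo)
}

Running it prints 3.701175992s 3.319213659s, matching the podStartE2EDuration and podStartSLOduration quoted above for manila-scheduler-0.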
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.709501 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw8zf\" (UniqueName: \"kubernetes.io/projected/f357ee97-025b-4963-8332-d2c5ba303362-kube-api-access-sw8zf\") pod \"f357ee97-025b-4963-8332-d2c5ba303362\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.709543 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-sg-core-conf-yaml\") pod \"f357ee97-025b-4963-8332-d2c5ba303362\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.709596 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-config-data\") pod \"f357ee97-025b-4963-8332-d2c5ba303362\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.709774 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-combined-ca-bundle\") pod \"f357ee97-025b-4963-8332-d2c5ba303362\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.709807 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-scripts\") pod \"f357ee97-025b-4963-8332-d2c5ba303362\" (UID: \"f357ee97-025b-4963-8332-d2c5ba303362\") " Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.710163 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f357ee97-025b-4963-8332-d2c5ba303362-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f357ee97-025b-4963-8332-d2c5ba303362" (UID: "f357ee97-025b-4963-8332-d2c5ba303362"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.710540 4941 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f357ee97-025b-4963-8332-d2c5ba303362-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.710559 4941 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f357ee97-025b-4963-8332-d2c5ba303362-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.717570 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-scripts" (OuterVolumeSpecName: "scripts") pod "f357ee97-025b-4963-8332-d2c5ba303362" (UID: "f357ee97-025b-4963-8332-d2c5ba303362"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.717933 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f357ee97-025b-4963-8332-d2c5ba303362-kube-api-access-sw8zf" (OuterVolumeSpecName: "kube-api-access-sw8zf") pod "f357ee97-025b-4963-8332-d2c5ba303362" (UID: "f357ee97-025b-4963-8332-d2c5ba303362"). 
InnerVolumeSpecName "kube-api-access-sw8zf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.724185 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f357ee97-025b-4963-8332-d2c5ba303362","Type":"ContainerDied","Data":"6959e5adc79e6c20e20ddabeddbec36a864d824638b28b460011a4e0c009703e"} Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.724265 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.724352 4941 scope.go:117] "RemoveContainer" containerID="2e7aae57e54dfa2140547023fc663d83f72da68597b1135770862e6992a01a5c" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.754658 4941 scope.go:117] "RemoveContainer" containerID="189fa0978a6a8dff5b6c651a62d1f5878c4b2b905d89b308df415d7448fe8312" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.764985 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f357ee97-025b-4963-8332-d2c5ba303362" (UID: "f357ee97-025b-4963-8332-d2c5ba303362"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.781415 4941 scope.go:117] "RemoveContainer" containerID="50ac0c93cbcd7eca02ec28b25b269f7a257d1aa9ff34230ce77410042d080302" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.808574 4941 scope.go:117] "RemoveContainer" containerID="6caf49ac800d31c774037f1dfb3fd17f6172d6613f841a0e92d842bdeff34f9a" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.812969 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.812999 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw8zf\" (UniqueName: \"kubernetes.io/projected/f357ee97-025b-4963-8332-d2c5ba303362-kube-api-access-sw8zf\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.813010 4941 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.818207 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f357ee97-025b-4963-8332-d2c5ba303362" (UID: "f357ee97-025b-4963-8332-d2c5ba303362"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.854910 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-config-data" (OuterVolumeSpecName: "config-data") pod "f357ee97-025b-4963-8332-d2c5ba303362" (UID: "f357ee97-025b-4963-8332-d2c5ba303362"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.914893 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:56 crc kubenswrapper[4941]: I1130 08:20:56.914938 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f357ee97-025b-4963-8332-d2c5ba303362-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.080431 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.101733 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.127731 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:20:57 crc kubenswrapper[4941]: E1130 08:20:57.128303 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="ceilometer-central-agent" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.128425 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="ceilometer-central-agent" Nov 30 08:20:57 crc kubenswrapper[4941]: E1130 08:20:57.128462 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="sg-core" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.128474 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="sg-core" Nov 30 08:20:57 crc kubenswrapper[4941]: E1130 08:20:57.128492 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="proxy-httpd" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.128501 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="proxy-httpd" Nov 30 08:20:57 crc kubenswrapper[4941]: E1130 08:20:57.128512 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="ceilometer-notification-agent" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.128523 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="ceilometer-notification-agent" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.129218 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="sg-core" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.129255 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="proxy-httpd" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.129373 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="ceilometer-central-agent" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.129404 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f357ee97-025b-4963-8332-d2c5ba303362" containerName="ceilometer-notification-agent" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.132844 4941 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.135598 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.137445 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.151878 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.201973 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.221828 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-run-httpd\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.222168 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.222374 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-scripts\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.222602 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-config-data\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.222737 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-log-httpd\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.222788 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbqkb\" (UniqueName: \"kubernetes.io/projected/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-kube-api-access-dbqkb\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.222842 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.324868 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-config-data\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.325335 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-log-httpd\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.325371 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbqkb\" (UniqueName: \"kubernetes.io/projected/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-kube-api-access-dbqkb\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.325419 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.325474 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-run-httpd\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.325539 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.325576 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-scripts\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.326139 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-log-httpd\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.326451 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-run-httpd\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.331757 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.331899 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-scripts\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.332177 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-config-data\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.332643 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.354487 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbqkb\" (UniqueName: \"kubernetes.io/projected/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-kube-api-access-dbqkb\") pod \"ceilometer-0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.471590 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.512496 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.544658 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f357ee97-025b-4963-8332-d2c5ba303362" path="/var/lib/kubelet/pods/f357ee97-025b-4963-8332-d2c5ba303362/volumes" Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.615419 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7474c9cf97-chjhn"] Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.629047 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" podUID="bea2342a-e7e2-4154-b873-c552f6c6fdbb" containerName="dnsmasq-dns" containerID="cri-o://9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f" gracePeriod=10 Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.738905 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"965722c8-5980-41b0-92e7-ebca41c408e2","Type":"ContainerStarted","Data":"0b200f939c3e77ee691d096fe242d278c0ae4652589ed9d8445751215f602aec"} Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.738951 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"965722c8-5980-41b0-92e7-ebca41c408e2","Type":"ContainerStarted","Data":"f1f724f816bb653a13b6b0677eecd309f7739b763e73ccc9b6508beafca29d9b"} Nov 30 08:20:57 crc kubenswrapper[4941]: I1130 08:20:57.770507 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.679491371 podStartE2EDuration="11.770490321s" podCreationTimestamp="2025-11-30 08:20:46 +0000 UTC" firstStartedPulling="2025-11-30 08:20:48.193997324 +0000 UTC m=+5668.962168933" lastFinishedPulling="2025-11-30 08:20:56.284996274 +0000 UTC m=+5677.053167883" observedRunningTime="2025-11-30 08:20:57.76949246 +0000 UTC m=+5678.537664069" watchObservedRunningTime="2025-11-30 08:20:57.770490321 
+0000 UTC m=+5678.538661930" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.051984 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.108381 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.147066 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-ovsdbserver-sb\") pod \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.147389 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tljgt\" (UniqueName: \"kubernetes.io/projected/bea2342a-e7e2-4154-b873-c552f6c6fdbb-kube-api-access-tljgt\") pod \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.147638 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-config\") pod \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.147855 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-dns-svc\") pod \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.148063 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-ovsdbserver-nb\") pod \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.167769 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bea2342a-e7e2-4154-b873-c552f6c6fdbb-kube-api-access-tljgt" (OuterVolumeSpecName: "kube-api-access-tljgt") pod "bea2342a-e7e2-4154-b873-c552f6c6fdbb" (UID: "bea2342a-e7e2-4154-b873-c552f6c6fdbb"). InnerVolumeSpecName "kube-api-access-tljgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.212069 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-config" (OuterVolumeSpecName: "config") pod "bea2342a-e7e2-4154-b873-c552f6c6fdbb" (UID: "bea2342a-e7e2-4154-b873-c552f6c6fdbb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.251027 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bea2342a-e7e2-4154-b873-c552f6c6fdbb" (UID: "bea2342a-e7e2-4154-b873-c552f6c6fdbb"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.254031 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-dns-svc\") pod \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\" (UID: \"bea2342a-e7e2-4154-b873-c552f6c6fdbb\") " Nov 30 08:20:58 crc kubenswrapper[4941]: W1130 08:20:58.254798 4941 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/bea2342a-e7e2-4154-b873-c552f6c6fdbb/volumes/kubernetes.io~configmap/dns-svc Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.254832 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bea2342a-e7e2-4154-b873-c552f6c6fdbb" (UID: "bea2342a-e7e2-4154-b873-c552f6c6fdbb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.255868 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tljgt\" (UniqueName: \"kubernetes.io/projected/bea2342a-e7e2-4154-b873-c552f6c6fdbb-kube-api-access-tljgt\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.255901 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.255915 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.269000 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bea2342a-e7e2-4154-b873-c552f6c6fdbb" (UID: "bea2342a-e7e2-4154-b873-c552f6c6fdbb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.281981 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bea2342a-e7e2-4154-b873-c552f6c6fdbb" (UID: "bea2342a-e7e2-4154-b873-c552f6c6fdbb"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.358073 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.358646 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bea2342a-e7e2-4154-b873-c552f6c6fdbb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.775460 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ad9cd97-3801-4901-a8c8-f90d69faf7a0","Type":"ContainerStarted","Data":"125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487"} Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.775511 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ad9cd97-3801-4901-a8c8-f90d69faf7a0","Type":"ContainerStarted","Data":"c58efecbcf3181131c345b93fcd2ada079e727b4c2b0c01f88b0c30eecd4e88d"} Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.779080 4941 generic.go:334] "Generic (PLEG): container finished" podID="bea2342a-e7e2-4154-b873-c552f6c6fdbb" containerID="9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f" exitCode=0 Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.779174 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" event={"ID":"bea2342a-e7e2-4154-b873-c552f6c6fdbb","Type":"ContainerDied","Data":"9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f"} Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.779190 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.779208 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7474c9cf97-chjhn" event={"ID":"bea2342a-e7e2-4154-b873-c552f6c6fdbb","Type":"ContainerDied","Data":"8e406a7ada6b9d030fa5b6c6e1d605f511e2c6f58b775410f06295bdb321081f"} Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.779229 4941 scope.go:117] "RemoveContainer" containerID="9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.830100 4941 scope.go:117] "RemoveContainer" containerID="4948b77c0b1396acac9dc961259bbc4bc7ba5da94a26d586f3c9266a9aa72496" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.830554 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7474c9cf97-chjhn"] Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.846497 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7474c9cf97-chjhn"] Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.860891 4941 scope.go:117] "RemoveContainer" containerID="9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f" Nov 30 08:20:58 crc kubenswrapper[4941]: E1130 08:20:58.861602 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f\": container with ID starting with 9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f not found: ID does not exist" containerID="9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.861670 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f"} err="failed to get container status \"9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f\": rpc error: code = NotFound desc = could not find container \"9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f\": container with ID starting with 9039c06db834acdd6606cc0065dd02d482708b4ce34a74490c758891e6ba578f not found: ID does not exist" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.861706 4941 scope.go:117] "RemoveContainer" containerID="4948b77c0b1396acac9dc961259bbc4bc7ba5da94a26d586f3c9266a9aa72496" Nov 30 08:20:58 crc kubenswrapper[4941]: E1130 08:20:58.862552 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4948b77c0b1396acac9dc961259bbc4bc7ba5da94a26d586f3c9266a9aa72496\": container with ID starting with 4948b77c0b1396acac9dc961259bbc4bc7ba5da94a26d586f3c9266a9aa72496 not found: ID does not exist" containerID="4948b77c0b1396acac9dc961259bbc4bc7ba5da94a26d586f3c9266a9aa72496" Nov 30 08:20:58 crc kubenswrapper[4941]: I1130 08:20:58.862593 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4948b77c0b1396acac9dc961259bbc4bc7ba5da94a26d586f3c9266a9aa72496"} err="failed to get container status \"4948b77c0b1396acac9dc961259bbc4bc7ba5da94a26d586f3c9266a9aa72496\": rpc error: code = NotFound desc = could not find container \"4948b77c0b1396acac9dc961259bbc4bc7ba5da94a26d586f3c9266a9aa72496\": container with ID starting with 4948b77c0b1396acac9dc961259bbc4bc7ba5da94a26d586f3c9266a9aa72496 not found: ID does not exist" Nov 30 
08:20:59 crc kubenswrapper[4941]: I1130 08:20:59.536879 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bea2342a-e7e2-4154-b873-c552f6c6fdbb" path="/var/lib/kubelet/pods/bea2342a-e7e2-4154-b873-c552f6c6fdbb/volumes" Nov 30 08:20:59 crc kubenswrapper[4941]: I1130 08:20:59.812299 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ad9cd97-3801-4901-a8c8-f90d69faf7a0","Type":"ContainerStarted","Data":"e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c"} Nov 30 08:21:00 crc kubenswrapper[4941]: I1130 08:21:00.050483 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-x5vrj"] Nov 30 08:21:00 crc kubenswrapper[4941]: I1130 08:21:00.065344 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-x5vrj"] Nov 30 08:21:00 crc kubenswrapper[4941]: I1130 08:21:00.831757 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ad9cd97-3801-4901-a8c8-f90d69faf7a0","Type":"ContainerStarted","Data":"5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d"} Nov 30 08:21:00 crc kubenswrapper[4941]: I1130 08:21:00.869521 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.042230 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-92b9-account-create-update-bvvmg"] Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.051122 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-smhpg"] Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.062537 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-q7wck"] Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.072300 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-92b9-account-create-update-bvvmg"] Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.081274 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-1620-account-create-update-h4vrp"] Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.089818 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-q7wck"] Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.099133 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-1620-account-create-update-h4vrp"] Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.110954 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-smhpg"] Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.391412 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kz8px"] Nov 30 08:21:01 crc kubenswrapper[4941]: E1130 08:21:01.391903 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bea2342a-e7e2-4154-b873-c552f6c6fdbb" containerName="dnsmasq-dns" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.391927 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="bea2342a-e7e2-4154-b873-c552f6c6fdbb" containerName="dnsmasq-dns" Nov 30 08:21:01 crc kubenswrapper[4941]: E1130 08:21:01.391978 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bea2342a-e7e2-4154-b873-c552f6c6fdbb" containerName="init" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.391990 4941 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="bea2342a-e7e2-4154-b873-c552f6c6fdbb" containerName="init" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.392229 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="bea2342a-e7e2-4154-b873-c552f6c6fdbb" containerName="dnsmasq-dns" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.393799 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.417946 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kz8px"] Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.436593 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5b46a80-4479-4c8f-955c-b2e63f1a8046-utilities\") pod \"redhat-operators-kz8px\" (UID: \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\") " pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.436702 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4tbw\" (UniqueName: \"kubernetes.io/projected/c5b46a80-4479-4c8f-955c-b2e63f1a8046-kube-api-access-x4tbw\") pod \"redhat-operators-kz8px\" (UID: \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\") " pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.442442 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5b46a80-4479-4c8f-955c-b2e63f1a8046-catalog-content\") pod \"redhat-operators-kz8px\" (UID: \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\") " pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.535214 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ba195b9-1fb2-446d-9644-271c87d97b4f" path="/var/lib/kubelet/pods/5ba195b9-1fb2-446d-9644-271c87d97b4f/volumes" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.536293 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="763a8718-0d67-4fda-afee-a01eebc05063" path="/var/lib/kubelet/pods/763a8718-0d67-4fda-afee-a01eebc05063/volumes" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.537067 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="913a4896-fef0-4a24-a143-d99183546680" path="/var/lib/kubelet/pods/913a4896-fef0-4a24-a143-d99183546680/volumes" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.537895 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed29adfa-9314-48f2-a5ce-b0615faace71" path="/var/lib/kubelet/pods/ed29adfa-9314-48f2-a5ce-b0615faace71/volumes" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.539636 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fce27667-dc7c-41ec-837f-c924456e2e1e" path="/var/lib/kubelet/pods/fce27667-dc7c-41ec-837f-c924456e2e1e/volumes" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.547032 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4tbw\" (UniqueName: \"kubernetes.io/projected/c5b46a80-4479-4c8f-955c-b2e63f1a8046-kube-api-access-x4tbw\") pod \"redhat-operators-kz8px\" (UID: \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\") " pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:01 crc kubenswrapper[4941]: 
I1130 08:21:01.547299 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5b46a80-4479-4c8f-955c-b2e63f1a8046-catalog-content\") pod \"redhat-operators-kz8px\" (UID: \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\") " pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.547489 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5b46a80-4479-4c8f-955c-b2e63f1a8046-utilities\") pod \"redhat-operators-kz8px\" (UID: \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\") " pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.547992 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5b46a80-4479-4c8f-955c-b2e63f1a8046-catalog-content\") pod \"redhat-operators-kz8px\" (UID: \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\") " pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.548161 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5b46a80-4479-4c8f-955c-b2e63f1a8046-utilities\") pod \"redhat-operators-kz8px\" (UID: \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\") " pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.568949 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4tbw\" (UniqueName: \"kubernetes.io/projected/c5b46a80-4479-4c8f-955c-b2e63f1a8046-kube-api-access-x4tbw\") pod \"redhat-operators-kz8px\" (UID: \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\") " pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:01 crc kubenswrapper[4941]: I1130 08:21:01.717150 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.033639 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-47e9-account-create-update-bgbkb"] Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.064756 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-47e9-account-create-update-bgbkb"] Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.250390 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kz8px"] Nov 30 08:21:02 crc kubenswrapper[4941]: W1130 08:21:02.253739 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5b46a80_4479_4c8f_955c_b2e63f1a8046.slice/crio-5fe2920bf8b06b3110e7a30b584ef05bff656c68c1b1097d2946844b4eabf907 WatchSource:0}: Error finding container 5fe2920bf8b06b3110e7a30b584ef05bff656c68c1b1097d2946844b4eabf907: Status 404 returned error can't find the container with id 5fe2920bf8b06b3110e7a30b584ef05bff656c68c1b1097d2946844b4eabf907 Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.860633 4941 generic.go:334] "Generic (PLEG): container finished" podID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" containerID="05d5f83f3012558b13c3d5a36900613ebfed7792e612b64f086c7129fcbbd333" exitCode=0 Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.860847 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kz8px" event={"ID":"c5b46a80-4479-4c8f-955c-b2e63f1a8046","Type":"ContainerDied","Data":"05d5f83f3012558b13c3d5a36900613ebfed7792e612b64f086c7129fcbbd333"} Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.862484 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kz8px" event={"ID":"c5b46a80-4479-4c8f-955c-b2e63f1a8046","Type":"ContainerStarted","Data":"5fe2920bf8b06b3110e7a30b584ef05bff656c68c1b1097d2946844b4eabf907"} Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.879570 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ad9cd97-3801-4901-a8c8-f90d69faf7a0","Type":"ContainerStarted","Data":"ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c"} Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.879744 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="ceilometer-central-agent" containerID="cri-o://125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487" gracePeriod=30 Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.879781 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="ceilometer-notification-agent" containerID="cri-o://e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c" gracePeriod=30 Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.879803 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="sg-core" containerID="cri-o://5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d" gracePeriod=30 Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.879768 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" 
Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.879955 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="proxy-httpd" containerID="cri-o://ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c" gracePeriod=30 Nov 30 08:21:02 crc kubenswrapper[4941]: I1130 08:21:02.923462 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.872607403 podStartE2EDuration="5.923411647s" podCreationTimestamp="2025-11-30 08:20:57 +0000 UTC" firstStartedPulling="2025-11-30 08:20:58.042057576 +0000 UTC m=+5678.810229185" lastFinishedPulling="2025-11-30 08:21:02.09286182 +0000 UTC m=+5682.861033429" observedRunningTime="2025-11-30 08:21:02.91352069 +0000 UTC m=+5683.681692299" watchObservedRunningTime="2025-11-30 08:21:02.923411647 +0000 UTC m=+5683.691583296" Nov 30 08:21:03 crc kubenswrapper[4941]: I1130 08:21:03.544599 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e64460e1-5b29-4c67-8b81-ca53d91dcfd2" path="/var/lib/kubelet/pods/e64460e1-5b29-4c67-8b81-ca53d91dcfd2/volumes" Nov 30 08:21:03 crc kubenswrapper[4941]: I1130 08:21:03.892410 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kz8px" event={"ID":"c5b46a80-4479-4c8f-955c-b2e63f1a8046","Type":"ContainerStarted","Data":"45879672938cf8dff60d6053339269fa0550b37eafd1c095caee1c43b98064c2"} Nov 30 08:21:03 crc kubenswrapper[4941]: I1130 08:21:03.894654 4941 generic.go:334] "Generic (PLEG): container finished" podID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerID="ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c" exitCode=0 Nov 30 08:21:03 crc kubenswrapper[4941]: I1130 08:21:03.894720 4941 generic.go:334] "Generic (PLEG): container finished" podID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerID="5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d" exitCode=2 Nov 30 08:21:03 crc kubenswrapper[4941]: I1130 08:21:03.894719 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ad9cd97-3801-4901-a8c8-f90d69faf7a0","Type":"ContainerDied","Data":"ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c"} Nov 30 08:21:03 crc kubenswrapper[4941]: I1130 08:21:03.894782 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ad9cd97-3801-4901-a8c8-f90d69faf7a0","Type":"ContainerDied","Data":"5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d"} Nov 30 08:21:03 crc kubenswrapper[4941]: I1130 08:21:03.894794 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ad9cd97-3801-4901-a8c8-f90d69faf7a0","Type":"ContainerDied","Data":"e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c"} Nov 30 08:21:03 crc kubenswrapper[4941]: I1130 08:21:03.894740 4941 generic.go:334] "Generic (PLEG): container finished" podID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerID="e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c" exitCode=0 Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.579729 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.659768 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-log-httpd\") pod \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.659923 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-sg-core-conf-yaml\") pod \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.659952 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbqkb\" (UniqueName: \"kubernetes.io/projected/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-kube-api-access-dbqkb\") pod \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.659969 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-scripts\") pod \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.660008 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-run-httpd\") pod \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.660040 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-combined-ca-bundle\") pod \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.660105 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-config-data\") pod \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\" (UID: \"3ad9cd97-3801-4901-a8c8-f90d69faf7a0\") " Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.708112 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3ad9cd97-3801-4901-a8c8-f90d69faf7a0" (UID: "3ad9cd97-3801-4901-a8c8-f90d69faf7a0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.710679 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3ad9cd97-3801-4901-a8c8-f90d69faf7a0" (UID: "3ad9cd97-3801-4901-a8c8-f90d69faf7a0"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.717197 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-scripts" (OuterVolumeSpecName: "scripts") pod "3ad9cd97-3801-4901-a8c8-f90d69faf7a0" (UID: "3ad9cd97-3801-4901-a8c8-f90d69faf7a0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.720402 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-kube-api-access-dbqkb" (OuterVolumeSpecName: "kube-api-access-dbqkb") pod "3ad9cd97-3801-4901-a8c8-f90d69faf7a0" (UID: "3ad9cd97-3801-4901-a8c8-f90d69faf7a0"). InnerVolumeSpecName "kube-api-access-dbqkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.762728 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbqkb\" (UniqueName: \"kubernetes.io/projected/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-kube-api-access-dbqkb\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.763156 4941 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-scripts\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.763249 4941 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.763363 4941 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.766089 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3ad9cd97-3801-4901-a8c8-f90d69faf7a0" (UID: "3ad9cd97-3801-4901-a8c8-f90d69faf7a0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.782989 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-config-data" (OuterVolumeSpecName: "config-data") pod "3ad9cd97-3801-4901-a8c8-f90d69faf7a0" (UID: "3ad9cd97-3801-4901-a8c8-f90d69faf7a0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.848072 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ad9cd97-3801-4901-a8c8-f90d69faf7a0" (UID: "3ad9cd97-3801-4901-a8c8-f90d69faf7a0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.866007 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.866045 4941 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.866058 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ad9cd97-3801-4901-a8c8-f90d69faf7a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.914076 4941 generic.go:334] "Generic (PLEG): container finished" podID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerID="125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487" exitCode=0 Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.914179 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ad9cd97-3801-4901-a8c8-f90d69faf7a0","Type":"ContainerDied","Data":"125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487"} Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.914250 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3ad9cd97-3801-4901-a8c8-f90d69faf7a0","Type":"ContainerDied","Data":"c58efecbcf3181131c345b93fcd2ada079e727b4c2b0c01f88b0c30eecd4e88d"} Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.914302 4941 scope.go:117] "RemoveContainer" containerID="ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.914913 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.951880 4941 scope.go:117] "RemoveContainer" containerID="5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d" Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.985811 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:21:04 crc kubenswrapper[4941]: I1130 08:21:04.997279 4941 scope.go:117] "RemoveContainer" containerID="e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.013297 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.034508 4941 scope.go:117] "RemoveContainer" containerID="125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.038300 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:21:05 crc kubenswrapper[4941]: E1130 08:21:05.038924 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="sg-core" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.038946 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="sg-core" Nov 30 08:21:05 crc kubenswrapper[4941]: E1130 08:21:05.038983 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="proxy-httpd" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.038992 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="proxy-httpd" Nov 30 08:21:05 crc kubenswrapper[4941]: E1130 08:21:05.039004 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="ceilometer-notification-agent" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.039011 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="ceilometer-notification-agent" Nov 30 08:21:05 crc kubenswrapper[4941]: E1130 08:21:05.039028 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="ceilometer-central-agent" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.039035 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="ceilometer-central-agent" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.039235 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="sg-core" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.039246 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="proxy-httpd" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.039256 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="ceilometer-notification-agent" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.039277 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" containerName="ceilometer-central-agent" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.041266 4941 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.046674 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.050767 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.057104 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.081263 4941 scope.go:117] "RemoveContainer" containerID="ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c" Nov 30 08:21:05 crc kubenswrapper[4941]: E1130 08:21:05.081848 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c\": container with ID starting with ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c not found: ID does not exist" containerID="ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.081893 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c"} err="failed to get container status \"ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c\": rpc error: code = NotFound desc = could not find container \"ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c\": container with ID starting with ef0a59cb0be28a53427fe3692348ce4dc7fc31579eb51d57676e8216af25ac4c not found: ID does not exist" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.081925 4941 scope.go:117] "RemoveContainer" containerID="5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d" Nov 30 08:21:05 crc kubenswrapper[4941]: E1130 08:21:05.082909 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d\": container with ID starting with 5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d not found: ID does not exist" containerID="5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.082943 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d"} err="failed to get container status \"5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d\": rpc error: code = NotFound desc = could not find container \"5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d\": container with ID starting with 5935b43e71aed0d8ad949906060ad7687e35ea83ab1dfd9a1444b5e1bac87e8d not found: ID does not exist" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.082967 4941 scope.go:117] "RemoveContainer" containerID="e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c" Nov 30 08:21:05 crc kubenswrapper[4941]: E1130 08:21:05.083401 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c\": container with ID starting with 
e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c not found: ID does not exist" containerID="e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.083427 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c"} err="failed to get container status \"e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c\": rpc error: code = NotFound desc = could not find container \"e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c\": container with ID starting with e472a95cad2c1069c5adf8ad078edff81f41f319db8ae04727d9ea1c63e5579c not found: ID does not exist" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.083445 4941 scope.go:117] "RemoveContainer" containerID="125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487" Nov 30 08:21:05 crc kubenswrapper[4941]: E1130 08:21:05.083773 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487\": container with ID starting with 125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487 not found: ID does not exist" containerID="125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.083801 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487"} err="failed to get container status \"125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487\": rpc error: code = NotFound desc = could not find container \"125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487\": container with ID starting with 125a7859ac932e0f1d460aa458a7257c3b3c1b55f47804ee11c44f7689f14487 not found: ID does not exist" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.177720 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-run-httpd\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.177794 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6tkv\" (UniqueName: \"kubernetes.io/projected/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-kube-api-access-p6tkv\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.177883 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-scripts\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.177934 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 
08:21:05.178033 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-config-data\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.178066 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-log-httpd\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.178084 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.281624 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.281774 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-config-data\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.281810 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-log-httpd\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.281835 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.281904 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-run-httpd\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.281938 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6tkv\" (UniqueName: \"kubernetes.io/projected/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-kube-api-access-p6tkv\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.281992 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-scripts\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 
08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.283648 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-log-httpd\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.283941 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-run-httpd\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.289701 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.290181 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-scripts\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.293758 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-config-data\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.294480 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.309470 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6tkv\" (UniqueName: \"kubernetes.io/projected/fe2c62f2-5689-47c8-bf38-dc1ec1547e8e-kube-api-access-p6tkv\") pod \"ceilometer-0\" (UID: \"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e\") " pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.365844 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.540240 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ad9cd97-3801-4901-a8c8-f90d69faf7a0" path="/var/lib/kubelet/pods/3ad9cd97-3801-4901-a8c8-f90d69faf7a0/volumes" Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.872455 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 30 08:21:05 crc kubenswrapper[4941]: W1130 08:21:05.880492 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe2c62f2_5689_47c8_bf38_dc1ec1547e8e.slice/crio-957408f44de8a5f370e65bef1248c2b366a419050de0a8dfcee9944f9e44c104 WatchSource:0}: Error finding container 957408f44de8a5f370e65bef1248c2b366a419050de0a8dfcee9944f9e44c104: Status 404 returned error can't find the container with id 957408f44de8a5f370e65bef1248c2b366a419050de0a8dfcee9944f9e44c104 Nov 30 08:21:05 crc kubenswrapper[4941]: I1130 08:21:05.928032 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e","Type":"ContainerStarted","Data":"957408f44de8a5f370e65bef1248c2b366a419050de0a8dfcee9944f9e44c104"} Nov 30 08:21:06 crc kubenswrapper[4941]: I1130 08:21:06.964157 4941 generic.go:334] "Generic (PLEG): container finished" podID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" containerID="45879672938cf8dff60d6053339269fa0550b37eafd1c095caee1c43b98064c2" exitCode=0 Nov 30 08:21:06 crc kubenswrapper[4941]: I1130 08:21:06.964485 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kz8px" event={"ID":"c5b46a80-4479-4c8f-955c-b2e63f1a8046","Type":"ContainerDied","Data":"45879672938cf8dff60d6053339269fa0550b37eafd1c095caee1c43b98064c2"} Nov 30 08:21:07 crc kubenswrapper[4941]: I1130 08:21:07.223023 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Nov 30 08:21:07 crc kubenswrapper[4941]: I1130 08:21:07.979252 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kz8px" event={"ID":"c5b46a80-4479-4c8f-955c-b2e63f1a8046","Type":"ContainerStarted","Data":"560645d6202a800a1d00eb338903074a980d47501e5f6701b63dc0cfcd645fe1"} Nov 30 08:21:07 crc kubenswrapper[4941]: I1130 08:21:07.983426 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e","Type":"ContainerStarted","Data":"e87343ed15c1c7526719cb751a805f945a1222ddabf89122f191378819c8914a"} Nov 30 08:21:07 crc kubenswrapper[4941]: I1130 08:21:07.983478 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e","Type":"ContainerStarted","Data":"1d65d21d3ea53ab896c58fe4a23f321167cb4263242aebab1a76dce79d3bff05"} Nov 30 08:21:08 crc kubenswrapper[4941]: I1130 08:21:08.006177 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kz8px" podStartSLOduration=2.217759276 podStartE2EDuration="7.006149149s" podCreationTimestamp="2025-11-30 08:21:01 +0000 UTC" firstStartedPulling="2025-11-30 08:21:02.868379333 +0000 UTC m=+5683.636550942" lastFinishedPulling="2025-11-30 08:21:07.656769206 +0000 UTC m=+5688.424940815" observedRunningTime="2025-11-30 08:21:08.006004685 +0000 UTC m=+5688.774176304" watchObservedRunningTime="2025-11-30 
08:21:08.006149149 +0000 UTC m=+5688.774320758" Nov 30 08:21:08 crc kubenswrapper[4941]: I1130 08:21:08.943711 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Nov 30 08:21:08 crc kubenswrapper[4941]: I1130 08:21:08.994973 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e","Type":"ContainerStarted","Data":"3dc5f4144606763caa2e0d60d186f147e33491998f7738ec8f76caf460f3ca47"} Nov 30 08:21:09 crc kubenswrapper[4941]: I1130 08:21:09.043443 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Nov 30 08:21:09 crc kubenswrapper[4941]: I1130 08:21:09.099303 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Nov 30 08:21:09 crc kubenswrapper[4941]: I1130 08:21:09.886386 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xxv9s"] Nov 30 08:21:09 crc kubenswrapper[4941]: I1130 08:21:09.889476 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:09 crc kubenswrapper[4941]: I1130 08:21:09.894957 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xxv9s"] Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.010042 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe2c62f2-5689-47c8-bf38-dc1ec1547e8e","Type":"ContainerStarted","Data":"682d245d3643d60a7e6149fdd99451bbda0c282622127e8a557e3dc63fe668c0"} Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.010508 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.043489 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.529496895 podStartE2EDuration="6.043465335s" podCreationTimestamp="2025-11-30 08:21:04 +0000 UTC" firstStartedPulling="2025-11-30 08:21:05.88440591 +0000 UTC m=+5686.652577519" lastFinishedPulling="2025-11-30 08:21:09.39837436 +0000 UTC m=+5690.166545959" observedRunningTime="2025-11-30 08:21:10.035878401 +0000 UTC m=+5690.804050020" watchObservedRunningTime="2025-11-30 08:21:10.043465335 +0000 UTC m=+5690.811636934" Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.052929 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r92w\" (UniqueName: \"kubernetes.io/projected/56cf97ff-5196-4975-909b-a838177567ee-kube-api-access-2r92w\") pod \"community-operators-xxv9s\" (UID: \"56cf97ff-5196-4975-909b-a838177567ee\") " pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.053031 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56cf97ff-5196-4975-909b-a838177567ee-utilities\") pod \"community-operators-xxv9s\" (UID: \"56cf97ff-5196-4975-909b-a838177567ee\") " pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.053136 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/56cf97ff-5196-4975-909b-a838177567ee-catalog-content\") pod \"community-operators-xxv9s\" (UID: \"56cf97ff-5196-4975-909b-a838177567ee\") " pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.154788 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56cf97ff-5196-4975-909b-a838177567ee-utilities\") pod \"community-operators-xxv9s\" (UID: \"56cf97ff-5196-4975-909b-a838177567ee\") " pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.154913 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56cf97ff-5196-4975-909b-a838177567ee-catalog-content\") pod \"community-operators-xxv9s\" (UID: \"56cf97ff-5196-4975-909b-a838177567ee\") " pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.155083 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r92w\" (UniqueName: \"kubernetes.io/projected/56cf97ff-5196-4975-909b-a838177567ee-kube-api-access-2r92w\") pod \"community-operators-xxv9s\" (UID: \"56cf97ff-5196-4975-909b-a838177567ee\") " pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.155572 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56cf97ff-5196-4975-909b-a838177567ee-utilities\") pod \"community-operators-xxv9s\" (UID: \"56cf97ff-5196-4975-909b-a838177567ee\") " pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.155626 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56cf97ff-5196-4975-909b-a838177567ee-catalog-content\") pod \"community-operators-xxv9s\" (UID: \"56cf97ff-5196-4975-909b-a838177567ee\") " pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.180023 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2r92w\" (UniqueName: \"kubernetes.io/projected/56cf97ff-5196-4975-909b-a838177567ee-kube-api-access-2r92w\") pod \"community-operators-xxv9s\" (UID: \"56cf97ff-5196-4975-909b-a838177567ee\") " pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:10 crc kubenswrapper[4941]: I1130 08:21:10.245635 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:11 crc kubenswrapper[4941]: W1130 08:21:11.616137 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56cf97ff_5196_4975_909b_a838177567ee.slice/crio-5c8a4c2630a1160373c5e0352ecad17075aa79cad516131bec1f3903fcf91320 WatchSource:0}: Error finding container 5c8a4c2630a1160373c5e0352ecad17075aa79cad516131bec1f3903fcf91320: Status 404 returned error can't find the container with id 5c8a4c2630a1160373c5e0352ecad17075aa79cad516131bec1f3903fcf91320 Nov 30 08:21:11 crc kubenswrapper[4941]: I1130 08:21:11.636932 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xxv9s"] Nov 30 08:21:11 crc kubenswrapper[4941]: I1130 08:21:11.724128 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:11 crc kubenswrapper[4941]: I1130 08:21:11.729804 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:12 crc kubenswrapper[4941]: I1130 08:21:12.037802 4941 generic.go:334] "Generic (PLEG): container finished" podID="56cf97ff-5196-4975-909b-a838177567ee" containerID="9907603cc8ca065b78847c064ea00df0ff7f849eff710e7a2dfaf0e370326c8b" exitCode=0 Nov 30 08:21:12 crc kubenswrapper[4941]: I1130 08:21:12.040161 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xxv9s" event={"ID":"56cf97ff-5196-4975-909b-a838177567ee","Type":"ContainerDied","Data":"9907603cc8ca065b78847c064ea00df0ff7f849eff710e7a2dfaf0e370326c8b"} Nov 30 08:21:12 crc kubenswrapper[4941]: I1130 08:21:12.040196 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xxv9s" event={"ID":"56cf97ff-5196-4975-909b-a838177567ee","Type":"ContainerStarted","Data":"5c8a4c2630a1160373c5e0352ecad17075aa79cad516131bec1f3903fcf91320"} Nov 30 08:21:12 crc kubenswrapper[4941]: I1130 08:21:12.839051 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kz8px" podUID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" containerName="registry-server" probeResult="failure" output=< Nov 30 08:21:12 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s Nov 30 08:21:12 crc kubenswrapper[4941]: > Nov 30 08:21:18 crc kubenswrapper[4941]: I1130 08:21:18.110181 4941 generic.go:334] "Generic (PLEG): container finished" podID="56cf97ff-5196-4975-909b-a838177567ee" containerID="1dfa34af432e9922d2f165ebdf1e46105b476e0a0bc9ac521fb30723f9ff4b13" exitCode=0 Nov 30 08:21:18 crc kubenswrapper[4941]: I1130 08:21:18.110307 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xxv9s" event={"ID":"56cf97ff-5196-4975-909b-a838177567ee","Type":"ContainerDied","Data":"1dfa34af432e9922d2f165ebdf1e46105b476e0a0bc9ac521fb30723f9ff4b13"} Nov 30 08:21:19 crc kubenswrapper[4941]: I1130 08:21:19.126202 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xxv9s" event={"ID":"56cf97ff-5196-4975-909b-a838177567ee","Type":"ContainerStarted","Data":"7dcaf837bd85984d992004522ef17a00cf1538973725e0d3c4e4d3b7e55495f1"} Nov 30 08:21:19 crc kubenswrapper[4941]: I1130 08:21:19.147728 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-xxv9s" podStartSLOduration=3.394541082 podStartE2EDuration="10.147703846s" podCreationTimestamp="2025-11-30 08:21:09 +0000 UTC" firstStartedPulling="2025-11-30 08:21:12.042372332 +0000 UTC m=+5692.810543941" lastFinishedPulling="2025-11-30 08:21:18.795535056 +0000 UTC m=+5699.563706705" observedRunningTime="2025-11-30 08:21:19.142213456 +0000 UTC m=+5699.910385075" watchObservedRunningTime="2025-11-30 08:21:19.147703846 +0000 UTC m=+5699.915875455" Nov 30 08:21:20 crc kubenswrapper[4941]: I1130 08:21:20.245963 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:20 crc kubenswrapper[4941]: I1130 08:21:20.247514 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:21 crc kubenswrapper[4941]: I1130 08:21:21.308400 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-xxv9s" podUID="56cf97ff-5196-4975-909b-a838177567ee" containerName="registry-server" probeResult="failure" output=< Nov 30 08:21:21 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s Nov 30 08:21:21 crc kubenswrapper[4941]: > Nov 30 08:21:21 crc kubenswrapper[4941]: I1130 08:21:21.773574 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:21 crc kubenswrapper[4941]: I1130 08:21:21.843442 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:22 crc kubenswrapper[4941]: I1130 08:21:22.025244 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kz8px"] Nov 30 08:21:23 crc kubenswrapper[4941]: I1130 08:21:23.188387 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kz8px" podUID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" containerName="registry-server" containerID="cri-o://560645d6202a800a1d00eb338903074a980d47501e5f6701b63dc0cfcd645fe1" gracePeriod=2 Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.206662 4941 generic.go:334] "Generic (PLEG): container finished" podID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" containerID="560645d6202a800a1d00eb338903074a980d47501e5f6701b63dc0cfcd645fe1" exitCode=0 Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.206761 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kz8px" event={"ID":"c5b46a80-4479-4c8f-955c-b2e63f1a8046","Type":"ContainerDied","Data":"560645d6202a800a1d00eb338903074a980d47501e5f6701b63dc0cfcd645fe1"} Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.207214 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kz8px" event={"ID":"c5b46a80-4479-4c8f-955c-b2e63f1a8046","Type":"ContainerDied","Data":"5fe2920bf8b06b3110e7a30b584ef05bff656c68c1b1097d2946844b4eabf907"} Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.207238 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5fe2920bf8b06b3110e7a30b584ef05bff656c68c1b1097d2946844b4eabf907" Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.269999 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.352697 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5b46a80-4479-4c8f-955c-b2e63f1a8046-catalog-content\") pod \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\" (UID: \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\") " Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.352877 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5b46a80-4479-4c8f-955c-b2e63f1a8046-utilities\") pod \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\" (UID: \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\") " Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.353065 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4tbw\" (UniqueName: \"kubernetes.io/projected/c5b46a80-4479-4c8f-955c-b2e63f1a8046-kube-api-access-x4tbw\") pod \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\" (UID: \"c5b46a80-4479-4c8f-955c-b2e63f1a8046\") " Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.354143 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5b46a80-4479-4c8f-955c-b2e63f1a8046-utilities" (OuterVolumeSpecName: "utilities") pod "c5b46a80-4479-4c8f-955c-b2e63f1a8046" (UID: "c5b46a80-4479-4c8f-955c-b2e63f1a8046"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.354761 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5b46a80-4479-4c8f-955c-b2e63f1a8046-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.361627 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5b46a80-4479-4c8f-955c-b2e63f1a8046-kube-api-access-x4tbw" (OuterVolumeSpecName: "kube-api-access-x4tbw") pod "c5b46a80-4479-4c8f-955c-b2e63f1a8046" (UID: "c5b46a80-4479-4c8f-955c-b2e63f1a8046"). InnerVolumeSpecName "kube-api-access-x4tbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.457144 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4tbw\" (UniqueName: \"kubernetes.io/projected/c5b46a80-4479-4c8f-955c-b2e63f1a8046-kube-api-access-x4tbw\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.466707 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5b46a80-4479-4c8f-955c-b2e63f1a8046-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5b46a80-4479-4c8f-955c-b2e63f1a8046" (UID: "c5b46a80-4479-4c8f-955c-b2e63f1a8046"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:21:24 crc kubenswrapper[4941]: I1130 08:21:24.559036 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5b46a80-4479-4c8f-955c-b2e63f1a8046-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:25 crc kubenswrapper[4941]: I1130 08:21:25.049638 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c9r8h"] Nov 30 08:21:25 crc kubenswrapper[4941]: I1130 08:21:25.062218 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c9r8h"] Nov 30 08:21:25 crc kubenswrapper[4941]: I1130 08:21:25.217844 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kz8px" Nov 30 08:21:25 crc kubenswrapper[4941]: I1130 08:21:25.270687 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kz8px"] Nov 30 08:21:25 crc kubenswrapper[4941]: I1130 08:21:25.282963 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kz8px"] Nov 30 08:21:25 crc kubenswrapper[4941]: I1130 08:21:25.534850 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3297a490-2e16-4d8f-b43b-49035dfd9d24" path="/var/lib/kubelet/pods/3297a490-2e16-4d8f-b43b-49035dfd9d24/volumes" Nov 30 08:21:25 crc kubenswrapper[4941]: I1130 08:21:25.535468 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" path="/var/lib/kubelet/pods/c5b46a80-4479-4c8f-955c-b2e63f1a8046/volumes" Nov 30 08:21:30 crc kubenswrapper[4941]: I1130 08:21:30.334252 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:30 crc kubenswrapper[4941]: I1130 08:21:30.396175 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xxv9s" Nov 30 08:21:30 crc kubenswrapper[4941]: I1130 08:21:30.468525 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xxv9s"] Nov 30 08:21:30 crc kubenswrapper[4941]: I1130 08:21:30.586158 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sx66t"] Nov 30 08:21:30 crc kubenswrapper[4941]: I1130 08:21:30.586493 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sx66t" podUID="f448deec-d51f-4448-8ff6-95f1177f3c19" containerName="registry-server" containerID="cri-o://5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77" gracePeriod=2 Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.219967 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sx66t" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.297484 4941 generic.go:334] "Generic (PLEG): container finished" podID="f448deec-d51f-4448-8ff6-95f1177f3c19" containerID="5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77" exitCode=0 Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.297585 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sx66t" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.297599 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sx66t" event={"ID":"f448deec-d51f-4448-8ff6-95f1177f3c19","Type":"ContainerDied","Data":"5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77"} Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.298038 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sx66t" event={"ID":"f448deec-d51f-4448-8ff6-95f1177f3c19","Type":"ContainerDied","Data":"f1ccd14aa2bbad61bea3470a6c71fd485d79d0c629ba1cf6b79de414d3751514"} Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.298074 4941 scope.go:117] "RemoveContainer" containerID="5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.335880 4941 scope.go:117] "RemoveContainer" containerID="0d4a2da23c3760129c3b6961a9690962a14f67453fc0f022cde58f0439ca8720" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.362300 4941 scope.go:117] "RemoveContainer" containerID="b495fa4900333e102223f0c0a6bc30425865c8a708b3936cac5a6be4f79da87d" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.380312 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pskr7\" (UniqueName: \"kubernetes.io/projected/f448deec-d51f-4448-8ff6-95f1177f3c19-kube-api-access-pskr7\") pod \"f448deec-d51f-4448-8ff6-95f1177f3c19\" (UID: \"f448deec-d51f-4448-8ff6-95f1177f3c19\") " Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.380385 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f448deec-d51f-4448-8ff6-95f1177f3c19-catalog-content\") pod \"f448deec-d51f-4448-8ff6-95f1177f3c19\" (UID: \"f448deec-d51f-4448-8ff6-95f1177f3c19\") " Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.380435 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f448deec-d51f-4448-8ff6-95f1177f3c19-utilities\") pod \"f448deec-d51f-4448-8ff6-95f1177f3c19\" (UID: \"f448deec-d51f-4448-8ff6-95f1177f3c19\") " Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.383321 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f448deec-d51f-4448-8ff6-95f1177f3c19-utilities" (OuterVolumeSpecName: "utilities") pod "f448deec-d51f-4448-8ff6-95f1177f3c19" (UID: "f448deec-d51f-4448-8ff6-95f1177f3c19"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.389136 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f448deec-d51f-4448-8ff6-95f1177f3c19-kube-api-access-pskr7" (OuterVolumeSpecName: "kube-api-access-pskr7") pod "f448deec-d51f-4448-8ff6-95f1177f3c19" (UID: "f448deec-d51f-4448-8ff6-95f1177f3c19"). InnerVolumeSpecName "kube-api-access-pskr7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.435566 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f448deec-d51f-4448-8ff6-95f1177f3c19-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f448deec-d51f-4448-8ff6-95f1177f3c19" (UID: "f448deec-d51f-4448-8ff6-95f1177f3c19"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.447974 4941 scope.go:117] "RemoveContainer" containerID="5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77" Nov 30 08:21:31 crc kubenswrapper[4941]: E1130 08:21:31.448864 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77\": container with ID starting with 5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77 not found: ID does not exist" containerID="5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.448974 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77"} err="failed to get container status \"5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77\": rpc error: code = NotFound desc = could not find container \"5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77\": container with ID starting with 5c255d8cc7b019fa85ca6d0807421aeecf9fb600bd7042bb3075282adef6dd77 not found: ID does not exist" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.449074 4941 scope.go:117] "RemoveContainer" containerID="0d4a2da23c3760129c3b6961a9690962a14f67453fc0f022cde58f0439ca8720" Nov 30 08:21:31 crc kubenswrapper[4941]: E1130 08:21:31.449732 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d4a2da23c3760129c3b6961a9690962a14f67453fc0f022cde58f0439ca8720\": container with ID starting with 0d4a2da23c3760129c3b6961a9690962a14f67453fc0f022cde58f0439ca8720 not found: ID does not exist" containerID="0d4a2da23c3760129c3b6961a9690962a14f67453fc0f022cde58f0439ca8720" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.449802 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d4a2da23c3760129c3b6961a9690962a14f67453fc0f022cde58f0439ca8720"} err="failed to get container status \"0d4a2da23c3760129c3b6961a9690962a14f67453fc0f022cde58f0439ca8720\": rpc error: code = NotFound desc = could not find container \"0d4a2da23c3760129c3b6961a9690962a14f67453fc0f022cde58f0439ca8720\": container with ID starting with 0d4a2da23c3760129c3b6961a9690962a14f67453fc0f022cde58f0439ca8720 not found: ID does not exist" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.449861 4941 scope.go:117] "RemoveContainer" containerID="b495fa4900333e102223f0c0a6bc30425865c8a708b3936cac5a6be4f79da87d" Nov 30 08:21:31 crc kubenswrapper[4941]: E1130 08:21:31.450278 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b495fa4900333e102223f0c0a6bc30425865c8a708b3936cac5a6be4f79da87d\": container with ID starting with b495fa4900333e102223f0c0a6bc30425865c8a708b3936cac5a6be4f79da87d not found: ID does not exist" 
containerID="b495fa4900333e102223f0c0a6bc30425865c8a708b3936cac5a6be4f79da87d" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.450410 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b495fa4900333e102223f0c0a6bc30425865c8a708b3936cac5a6be4f79da87d"} err="failed to get container status \"b495fa4900333e102223f0c0a6bc30425865c8a708b3936cac5a6be4f79da87d\": rpc error: code = NotFound desc = could not find container \"b495fa4900333e102223f0c0a6bc30425865c8a708b3936cac5a6be4f79da87d\": container with ID starting with b495fa4900333e102223f0c0a6bc30425865c8a708b3936cac5a6be4f79da87d not found: ID does not exist" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.483081 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pskr7\" (UniqueName: \"kubernetes.io/projected/f448deec-d51f-4448-8ff6-95f1177f3c19-kube-api-access-pskr7\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.483117 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f448deec-d51f-4448-8ff6-95f1177f3c19-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.483126 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f448deec-d51f-4448-8ff6-95f1177f3c19-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.633520 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sx66t"] Nov 30 08:21:31 crc kubenswrapper[4941]: I1130 08:21:31.644943 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-sx66t"] Nov 30 08:21:33 crc kubenswrapper[4941]: I1130 08:21:33.544141 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f448deec-d51f-4448-8ff6-95f1177f3c19" path="/var/lib/kubelet/pods/f448deec-d51f-4448-8ff6-95f1177f3c19/volumes" Nov 30 08:21:35 crc kubenswrapper[4941]: I1130 08:21:35.372974 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 30 08:21:47 crc kubenswrapper[4941]: I1130 08:21:47.046835 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-zxbm8"] Nov 30 08:21:47 crc kubenswrapper[4941]: I1130 08:21:47.062184 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-zxbm8"] Nov 30 08:21:47 crc kubenswrapper[4941]: I1130 08:21:47.539899 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2" path="/var/lib/kubelet/pods/5e70e908-4d18-4ad7-99b5-b0ca3b57a4e2/volumes" Nov 30 08:21:48 crc kubenswrapper[4941]: I1130 08:21:48.042359 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dmdkw"] Nov 30 08:21:48 crc kubenswrapper[4941]: I1130 08:21:48.055502 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dmdkw"] Nov 30 08:21:49 crc kubenswrapper[4941]: I1130 08:21:49.443889 4941 scope.go:117] "RemoveContainer" containerID="8870350f29ad85874031abf61acb547306c9d0a2b3929e87239ef09ef6971a6e" Nov 30 08:21:49 crc kubenswrapper[4941]: I1130 08:21:49.481066 4941 scope.go:117] "RemoveContainer" containerID="ff4e9686f96b7d995147138eb7a0e7fb842515362838c07ad669c78fcfc00b57" Nov 30 08:21:49 crc 
kubenswrapper[4941]: I1130 08:21:49.575345 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9f9867d-77d1-42f5-aca3-2b84112deb56" path="/var/lib/kubelet/pods/f9f9867d-77d1-42f5-aca3-2b84112deb56/volumes" Nov 30 08:21:49 crc kubenswrapper[4941]: I1130 08:21:49.619347 4941 scope.go:117] "RemoveContainer" containerID="f9af4532560a62efe5cec47ef5f5e3ea4ff956766b2914285856a778fef81eca" Nov 30 08:21:49 crc kubenswrapper[4941]: I1130 08:21:49.697314 4941 scope.go:117] "RemoveContainer" containerID="1b37ff5150eb44ccd342e129be05564d33d508de3f8e20421fb1815bf920ae51" Nov 30 08:21:49 crc kubenswrapper[4941]: I1130 08:21:49.792125 4941 scope.go:117] "RemoveContainer" containerID="e21370cfaeb4aa8e987a00c22289353ec9a183bb03eafdf1e3f5e8b0e80975d3" Nov 30 08:21:49 crc kubenswrapper[4941]: I1130 08:21:49.838853 4941 scope.go:117] "RemoveContainer" containerID="f06d52d575bcf7601e7923ad762d59a984f595ee567f7aad19fa4c94ca590620" Nov 30 08:21:49 crc kubenswrapper[4941]: I1130 08:21:49.912895 4941 scope.go:117] "RemoveContainer" containerID="d1ce930786b6b8bc19f10296db2aac96177843602929014e1c5e56ac8ccc2a36" Nov 30 08:21:49 crc kubenswrapper[4941]: I1130 08:21:49.963453 4941 scope.go:117] "RemoveContainer" containerID="c1770aeab67d07993bbada27c7cd95226f99ec9d3c91227e856e86512fc145c0" Nov 30 08:21:50 crc kubenswrapper[4941]: I1130 08:21:50.021188 4941 scope.go:117] "RemoveContainer" containerID="b4032330490d57c515799bcb0b80c36b382ba44eae0d8b47a491cbc234f8f188" Nov 30 08:21:59 crc kubenswrapper[4941]: I1130 08:21:59.916302 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d8f86bdc9-lhkx2"] Nov 30 08:21:59 crc kubenswrapper[4941]: E1130 08:21:59.917903 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f448deec-d51f-4448-8ff6-95f1177f3c19" containerName="registry-server" Nov 30 08:21:59 crc kubenswrapper[4941]: I1130 08:21:59.917926 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f448deec-d51f-4448-8ff6-95f1177f3c19" containerName="registry-server" Nov 30 08:21:59 crc kubenswrapper[4941]: E1130 08:21:59.917969 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f448deec-d51f-4448-8ff6-95f1177f3c19" containerName="extract-utilities" Nov 30 08:21:59 crc kubenswrapper[4941]: I1130 08:21:59.917978 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f448deec-d51f-4448-8ff6-95f1177f3c19" containerName="extract-utilities" Nov 30 08:21:59 crc kubenswrapper[4941]: E1130 08:21:59.918003 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" containerName="extract-utilities" Nov 30 08:21:59 crc kubenswrapper[4941]: I1130 08:21:59.918012 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" containerName="extract-utilities" Nov 30 08:21:59 crc kubenswrapper[4941]: E1130 08:21:59.918041 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f448deec-d51f-4448-8ff6-95f1177f3c19" containerName="extract-content" Nov 30 08:21:59 crc kubenswrapper[4941]: I1130 08:21:59.918049 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f448deec-d51f-4448-8ff6-95f1177f3c19" containerName="extract-content" Nov 30 08:21:59 crc kubenswrapper[4941]: E1130 08:21:59.918059 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" containerName="registry-server" Nov 30 08:21:59 crc kubenswrapper[4941]: I1130 08:21:59.918067 4941 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" containerName="registry-server" Nov 30 08:21:59 crc kubenswrapper[4941]: E1130 08:21:59.918078 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" containerName="extract-content" Nov 30 08:21:59 crc kubenswrapper[4941]: I1130 08:21:59.918085 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" containerName="extract-content" Nov 30 08:21:59 crc kubenswrapper[4941]: I1130 08:21:59.918401 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5b46a80-4479-4c8f-955c-b2e63f1a8046" containerName="registry-server" Nov 30 08:21:59 crc kubenswrapper[4941]: I1130 08:21:59.918426 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f448deec-d51f-4448-8ff6-95f1177f3c19" containerName="registry-server" Nov 30 08:21:59 crc kubenswrapper[4941]: I1130 08:21:59.926126 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:21:59 crc kubenswrapper[4941]: I1130 08:21:59.937959 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d8f86bdc9-lhkx2"] Nov 30 08:21:59 crc kubenswrapper[4941]: I1130 08:21:59.961601 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.020695 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-config\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.020790 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-openstack-cell1\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.020843 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-ovsdbserver-nb\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.020876 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-ovsdbserver-sb\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.020906 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-dns-svc\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.020937 4941 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp5vn\" (UniqueName: \"kubernetes.io/projected/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-kube-api-access-hp5vn\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.122922 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-ovsdbserver-nb\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.123011 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-ovsdbserver-sb\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.123046 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-dns-svc\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.123086 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp5vn\" (UniqueName: \"kubernetes.io/projected/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-kube-api-access-hp5vn\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.123188 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-config\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.123236 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-openstack-cell1\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.124332 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-openstack-cell1\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.124527 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-ovsdbserver-nb\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.124919 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-ovsdbserver-sb\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.125649 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-config\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.125836 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-dns-svc\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.151187 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp5vn\" (UniqueName: \"kubernetes.io/projected/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-kube-api-access-hp5vn\") pod \"dnsmasq-dns-6d8f86bdc9-lhkx2\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.279833 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:00 crc kubenswrapper[4941]: I1130 08:22:00.980054 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d8f86bdc9-lhkx2"] Nov 30 08:22:01 crc kubenswrapper[4941]: I1130 08:22:01.740533 4941 generic.go:334] "Generic (PLEG): container finished" podID="3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" containerID="4469112f7f700d7c70443fc5589724eba3304a0023b481877389f59d2a0b8e5e" exitCode=0 Nov 30 08:22:01 crc kubenswrapper[4941]: I1130 08:22:01.740657 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" event={"ID":"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee","Type":"ContainerDied","Data":"4469112f7f700d7c70443fc5589724eba3304a0023b481877389f59d2a0b8e5e"} Nov 30 08:22:01 crc kubenswrapper[4941]: I1130 08:22:01.741207 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" event={"ID":"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee","Type":"ContainerStarted","Data":"4383485f246d82a779b90e17ae636b0443e09811dedf02fa478ac12c96cc1a17"} Nov 30 08:22:02 crc kubenswrapper[4941]: I1130 08:22:02.754419 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" event={"ID":"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee","Type":"ContainerStarted","Data":"e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1"} Nov 30 08:22:02 crc kubenswrapper[4941]: I1130 08:22:02.754878 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:02 crc kubenswrapper[4941]: I1130 08:22:02.788109 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" podStartSLOduration=3.788091888 podStartE2EDuration="3.788091888s" podCreationTimestamp="2025-11-30 08:21:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:22:02.783487065 
+0000 UTC m=+5743.551658694" watchObservedRunningTime="2025-11-30 08:22:02.788091888 +0000 UTC m=+5743.556263497" Nov 30 08:22:02 crc kubenswrapper[4941]: I1130 08:22:02.978580 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:22:02 crc kubenswrapper[4941]: I1130 08:22:02.979189 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:22:07 crc kubenswrapper[4941]: I1130 08:22:07.082461 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-dfwtd"] Nov 30 08:22:07 crc kubenswrapper[4941]: I1130 08:22:07.101949 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-dfwtd"] Nov 30 08:22:07 crc kubenswrapper[4941]: I1130 08:22:07.548879 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c6318bb-a4f0-4d55-90f3-8b44ac03b110" path="/var/lib/kubelet/pods/9c6318bb-a4f0-4d55-90f3-8b44ac03b110/volumes" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.282534 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.377580 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f54977c-kbp9d"] Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.383659 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f54977c-kbp9d" podUID="05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" containerName="dnsmasq-dns" containerID="cri-o://05e4f23f35e4646ff2766557762474d1d69aece815a1a6ca444357c84bd883de" gracePeriod=10 Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.571414 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79ff8bb889-l8cq5"] Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.597739 4941 util.go:30] "No sandbox for pod can be found. 
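The machine-config-daemon liveness failures a few entries above ("dial tcp 127.0.0.1:8798: connect: connection refused") are ordinary HTTP probe results. A minimal sketch of such a probe follows, assuming only that the kubelet GETs the endpoint and treats any transport error or non-2xx status as a failure; the port and path are taken from the log entries, and the helper name probeHTTP is ours.

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeHTTP returns nil when the endpoint answers with a 2xx status.
// "connection refused", as in the machine-config-daemon entries above,
// surfaces as a transport error from Get and counts as a failure.
func probeHTTP(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return fmt.Errorf("probe failed: %w", err) // e.g. dial tcp ...: connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return fmt.Errorf("probe failed: unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probeHTTP("http://127.0.0.1:8798/health", time.Second); err != nil {
		fmt.Println("Liveness:", err)
	}
}
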
Need to start a new one" pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.645795 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79ff8bb889-l8cq5"] Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.646672 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-config\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.646885 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-ovsdbserver-sb\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.646949 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-openstack-cell1\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.647010 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc49t\" (UniqueName: \"kubernetes.io/projected/b4296723-5d2c-4376-927c-4fb06c557533-kube-api-access-pc49t\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.647130 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-dns-svc\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.647265 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-ovsdbserver-nb\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.751148 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-ovsdbserver-sb\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.751213 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-openstack-cell1\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.751256 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-pc49t\" (UniqueName: \"kubernetes.io/projected/b4296723-5d2c-4376-927c-4fb06c557533-kube-api-access-pc49t\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.751312 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-dns-svc\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.751365 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-ovsdbserver-nb\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.751391 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-config\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.752237 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-config\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.752869 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-dns-svc\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.753033 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-ovsdbserver-sb\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.753596 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-ovsdbserver-nb\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.754164 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/b4296723-5d2c-4376-927c-4fb06c557533-openstack-cell1\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.779771 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc49t\" (UniqueName: 
\"kubernetes.io/projected/b4296723-5d2c-4376-927c-4fb06c557533-kube-api-access-pc49t\") pod \"dnsmasq-dns-79ff8bb889-l8cq5\" (UID: \"b4296723-5d2c-4376-927c-4fb06c557533\") " pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.864884 4941 generic.go:334] "Generic (PLEG): container finished" podID="05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" containerID="05e4f23f35e4646ff2766557762474d1d69aece815a1a6ca444357c84bd883de" exitCode=0 Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.865428 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f54977c-kbp9d" event={"ID":"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1","Type":"ContainerDied","Data":"05e4f23f35e4646ff2766557762474d1d69aece815a1a6ca444357c84bd883de"} Nov 30 08:22:10 crc kubenswrapper[4941]: I1130 08:22:10.945703 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.103923 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.159436 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-config\") pod \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.159491 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-ovsdbserver-sb\") pod \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.159544 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67gvm\" (UniqueName: \"kubernetes.io/projected/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-kube-api-access-67gvm\") pod \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.159769 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-dns-svc\") pod \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.159829 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-ovsdbserver-nb\") pod \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\" (UID: \"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1\") " Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.181842 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-kube-api-access-67gvm" (OuterVolumeSpecName: "kube-api-access-67gvm") pod "05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" (UID: "05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1"). InnerVolumeSpecName "kube-api-access-67gvm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.240290 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" (UID: "05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.247054 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-config" (OuterVolumeSpecName: "config") pod "05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" (UID: "05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.266993 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" (UID: "05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.270356 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.270395 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.270406 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.270415 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67gvm\" (UniqueName: \"kubernetes.io/projected/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-kube-api-access-67gvm\") on node \"crc\" DevicePath \"\"" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.279981 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" (UID: "05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.372108 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.546016 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79ff8bb889-l8cq5"] Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.879927 4941 generic.go:334] "Generic (PLEG): container finished" podID="b4296723-5d2c-4376-927c-4fb06c557533" containerID="7fef803dde164277c16c1f35228532444827d8335b960194fe566bac5a487303" exitCode=0 Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.880025 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" event={"ID":"b4296723-5d2c-4376-927c-4fb06c557533","Type":"ContainerDied","Data":"7fef803dde164277c16c1f35228532444827d8335b960194fe566bac5a487303"} Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.880479 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" event={"ID":"b4296723-5d2c-4376-927c-4fb06c557533","Type":"ContainerStarted","Data":"449a8dd0756591640c8f2191cedca1d6c4bd878e5ead4904c16911c2ef466f25"} Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.883879 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f54977c-kbp9d" event={"ID":"05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1","Type":"ContainerDied","Data":"a774a55184ed4d6906f7b8bf62068c02b0ebfe55eb6ab094a257fd7fb6db6560"} Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.883955 4941 scope.go:117] "RemoveContainer" containerID="05e4f23f35e4646ff2766557762474d1d69aece815a1a6ca444357c84bd883de" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.883968 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f54977c-kbp9d" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.978053 4941 scope.go:117] "RemoveContainer" containerID="95f57afa8092b4380f0ce947cb61dc00c885c915c417a99a3769654cb2a17239" Nov 30 08:22:11 crc kubenswrapper[4941]: I1130 08:22:11.996023 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f54977c-kbp9d"] Nov 30 08:22:12 crc kubenswrapper[4941]: I1130 08:22:12.011203 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f54977c-kbp9d"] Nov 30 08:22:12 crc kubenswrapper[4941]: I1130 08:22:12.899153 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" event={"ID":"b4296723-5d2c-4376-927c-4fb06c557533","Type":"ContainerStarted","Data":"abbf2ee3e7916f56c2fd32a322e1b99c95736c019ed6168438a02eee45c46124"} Nov 30 08:22:12 crc kubenswrapper[4941]: I1130 08:22:12.899730 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:12 crc kubenswrapper[4941]: I1130 08:22:12.936115 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" podStartSLOduration=2.936089863 podStartE2EDuration="2.936089863s" podCreationTimestamp="2025-11-30 08:22:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 08:22:12.929257453 +0000 UTC m=+5753.697429062" watchObservedRunningTime="2025-11-30 08:22:12.936089863 +0000 UTC m=+5753.704261472" Nov 30 08:22:13 crc kubenswrapper[4941]: I1130 08:22:13.535510 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" path="/var/lib/kubelet/pods/05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1/volumes" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.372067 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf"] Nov 30 08:22:17 crc kubenswrapper[4941]: E1130 08:22:17.373455 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" containerName="dnsmasq-dns" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.373474 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" containerName="dnsmasq-dns" Nov 30 08:22:17 crc kubenswrapper[4941]: E1130 08:22:17.373502 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" containerName="init" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.373511 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" containerName="init" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.373824 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="05ddc04f-c925-4a90-b4ca-3b9ee47c5cb1" containerName="dnsmasq-dns" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.374950 4941 util.go:30] "No sandbox for pod can be found. 
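The pod_startup_latency_tracker entries above report two figures: podStartE2EDuration is observed-running minus creation, while podStartSLOduration additionally subtracts image-pull time. When the pull timestamps are the zero value "0001-01-01 00:00:00" (image already present), the two are identical, as in the dnsmasq entries; the validation pod later in this log (08:22:29) shows the pulling case. A sketch of that arithmetic, hedged as our reading of the logged fields rather than the tracker's exact code:

package main

import (
	"fmt"
	"time"
)

// startupDurations derives the two durations the tracker logs. SLO time
// excludes the image-pull window when pull timestamps are set.
func startupDurations(created, firstPull, lastPull, observedRunning time.Time) (slo, e2e time.Duration) {
	e2e = observedRunning.Sub(created)
	slo = e2e
	if !firstPull.IsZero() && !lastPull.IsZero() {
		slo = e2e - lastPull.Sub(firstPull)
	}
	return slo, e2e
}

func main() {
	// Approximate timestamps from the 08:22:17 validation pod entries.
	created := time.Date(2025, 11, 30, 8, 22, 17, 0, time.UTC)
	firstPull := created.Add(1*time.Second + 381*time.Millisecond)
	lastPull := created.Add(11*time.Second + 790*time.Millisecond)
	running := created.Add(12*time.Second + 135*time.Millisecond)
	slo, e2e := startupDurations(created, firstPull, lastPull, running)
	fmt.Println(slo, e2e) // SLO ~1.726s, E2E ~12.135s, matching the logged values
}
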
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.377766 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.378675 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.379026 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.379221 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.410522 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf"] Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.448464 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmn6c\" (UniqueName: \"kubernetes.io/projected/14bba754-3a7e-4d3e-a017-02842a5dc338-kube-api-access-jmn6c\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.448540 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.448603 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.448665 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.448718 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.549901 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-jmn6c\" (UniqueName: \"kubernetes.io/projected/14bba754-3a7e-4d3e-a017-02842a5dc338-kube-api-access-jmn6c\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.550209 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.550349 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.550514 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.550678 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.558907 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.562044 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.568103 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc 
kubenswrapper[4941]: I1130 08:22:17.572834 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.577123 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmn6c\" (UniqueName: \"kubernetes.io/projected/14bba754-3a7e-4d3e-a017-02842a5dc338-kube-api-access-jmn6c\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:17 crc kubenswrapper[4941]: I1130 08:22:17.703758 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" Nov 30 08:22:18 crc kubenswrapper[4941]: I1130 08:22:18.379831 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf"] Nov 30 08:22:18 crc kubenswrapper[4941]: I1130 08:22:18.981137 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" event={"ID":"14bba754-3a7e-4d3e-a017-02842a5dc338","Type":"ContainerStarted","Data":"1fed94b29e898e108193ce2f4d203e8c3f3014c21192c95f7850853f5f775caf"} Nov 30 08:22:20 crc kubenswrapper[4941]: I1130 08:22:20.947720 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79ff8bb889-l8cq5" Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.030740 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d8f86bdc9-lhkx2"] Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.031081 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" podUID="3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" containerName="dnsmasq-dns" containerID="cri-o://e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1" gracePeriod=10 Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.651650 4941 util.go:48] "No ready sandbox for pod can be found. 
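"Killing container with a grace period ... gracePeriod=10" above describes the standard two-step termination contract: a polite signal first, SIGKILL only if the process outlives the grace period. In reality this goes through the CRI StopContainer call into cri-o; the sketch below models only the signalling contract against an ordinary child process, with names of our choosing.

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace sends SIGTERM, waits up to the grace period for the process
// to exit, then escalates to SIGKILL, mirroring the gracePeriod=10 entries.
func killWithGrace(cmd *exec.Cmd, grace time.Duration) error {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
		return err
	}
	select {
	case <-done: // exited within the grace period
		return nil
	case <-time.After(grace):
		return cmd.Process.Kill() // escalate to SIGKILL
	}
}

func main() {
	cmd := exec.Command("sleep", "60")
	if err := cmd.Start(); err != nil {
		fmt.Println(err)
		return
	}
	// sleep exits promptly on SIGTERM, so this returns well before 10s.
	fmt.Println(killWithGrace(cmd, 10*time.Second))
}
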
Need to start a new one" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.773769 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hp5vn\" (UniqueName: \"kubernetes.io/projected/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-kube-api-access-hp5vn\") pod \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.773830 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-openstack-cell1\") pod \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.773852 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-ovsdbserver-nb\") pod \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.773890 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-config\") pod \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.773913 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-dns-svc\") pod \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.774008 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-ovsdbserver-sb\") pod \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\" (UID: \"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee\") " Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.805873 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-kube-api-access-hp5vn" (OuterVolumeSpecName: "kube-api-access-hp5vn") pod "3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" (UID: "3510e7ee-c73f-4125-baf4-9eb54d1aa8ee"). InnerVolumeSpecName "kube-api-access-hp5vn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.834913 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" (UID: "3510e7ee-c73f-4125-baf4-9eb54d1aa8ee"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.841963 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-config" (OuterVolumeSpecName: "config") pod "3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" (UID: "3510e7ee-c73f-4125-baf4-9eb54d1aa8ee"). InnerVolumeSpecName "config". 
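The kube-api-access-* volumes being torn down here are projected volumes that bundle a service-account token, the cluster CA certificate, and the pod's namespace into one directory. A container consumes them from the conventional mount point; this sketch reads the same three files and assumes only that standard layout (it prints errors when not run inside a pod).

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Conventional mount point of the projected service-account volume.
	root := "/var/run/secrets/kubernetes.io/serviceaccount"
	for _, f := range []string{"token", "ca.crt", "namespace"} {
		b, err := os.ReadFile(filepath.Join(root, f))
		if err != nil {
			fmt.Println(f, "->", err) // not running in a pod
			continue
		}
		fmt.Printf("%s: %d bytes\n", f, len(b))
	}
}
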
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.854663 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" (UID: "3510e7ee-c73f-4125-baf4-9eb54d1aa8ee"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.854856 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" (UID: "3510e7ee-c73f-4125-baf4-9eb54d1aa8ee"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.877071 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hp5vn\" (UniqueName: \"kubernetes.io/projected/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-kube-api-access-hp5vn\") on node \"crc\" DevicePath \"\"" Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.877115 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.877130 4941 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-config\") on node \"crc\" DevicePath \"\"" Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.877142 4941 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.877152 4941 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.877944 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" (UID: "3510e7ee-c73f-4125-baf4-9eb54d1aa8ee"). InnerVolumeSpecName "openstack-cell1". 
PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:22:21 crc kubenswrapper[4941]: I1130 08:22:21.979421 4941 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee-openstack-cell1\") on node \"crc\" DevicePath \"\""
Nov 30 08:22:22 crc kubenswrapper[4941]: I1130 08:22:22.033070 4941 generic.go:334] "Generic (PLEG): container finished" podID="3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" containerID="e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1" exitCode=0
Nov 30 08:22:22 crc kubenswrapper[4941]: I1130 08:22:22.033131 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" event={"ID":"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee","Type":"ContainerDied","Data":"e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1"}
Nov 30 08:22:22 crc kubenswrapper[4941]: I1130 08:22:22.033169 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2" event={"ID":"3510e7ee-c73f-4125-baf4-9eb54d1aa8ee","Type":"ContainerDied","Data":"4383485f246d82a779b90e17ae636b0443e09811dedf02fa478ac12c96cc1a17"}
Nov 30 08:22:22 crc kubenswrapper[4941]: I1130 08:22:22.033196 4941 scope.go:117] "RemoveContainer" containerID="e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1"
Nov 30 08:22:22 crc kubenswrapper[4941]: I1130 08:22:22.033436 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d8f86bdc9-lhkx2"
Nov 30 08:22:22 crc kubenswrapper[4941]: I1130 08:22:22.093738 4941 scope.go:117] "RemoveContainer" containerID="4469112f7f700d7c70443fc5589724eba3304a0023b481877389f59d2a0b8e5e"
Nov 30 08:22:22 crc kubenswrapper[4941]: I1130 08:22:22.099416 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d8f86bdc9-lhkx2"]
Nov 30 08:22:22 crc kubenswrapper[4941]: I1130 08:22:22.106511 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d8f86bdc9-lhkx2"]
Nov 30 08:22:22 crc kubenswrapper[4941]: I1130 08:22:22.201502 4941 scope.go:117] "RemoveContainer" containerID="e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1"
Nov 30 08:22:22 crc kubenswrapper[4941]: E1130 08:22:22.206488 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1\": container with ID starting with e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1 not found: ID does not exist" containerID="e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1"
Nov 30 08:22:22 crc kubenswrapper[4941]: I1130 08:22:22.206662 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1"} err="failed to get container status \"e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1\": rpc error: code = NotFound desc = could not find container \"e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1\": container with ID starting with e25042d9fb0461919f6eb3a514c050f0ee772b9459ea37a37143ca09db1541a1 not found: ID does not exist"
Nov 30 08:22:22 crc kubenswrapper[4941]: I1130 08:22:22.206798 4941 scope.go:117] "RemoveContainer" containerID="4469112f7f700d7c70443fc5589724eba3304a0023b481877389f59d2a0b8e5e"
Nov 30 08:22:22 crc kubenswrapper[4941]: E1130 08:22:22.217543 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4469112f7f700d7c70443fc5589724eba3304a0023b481877389f59d2a0b8e5e\": container with ID starting with 4469112f7f700d7c70443fc5589724eba3304a0023b481877389f59d2a0b8e5e not found: ID does not exist" containerID="4469112f7f700d7c70443fc5589724eba3304a0023b481877389f59d2a0b8e5e"
Nov 30 08:22:22 crc kubenswrapper[4941]: I1130 08:22:22.217906 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4469112f7f700d7c70443fc5589724eba3304a0023b481877389f59d2a0b8e5e"} err="failed to get container status \"4469112f7f700d7c70443fc5589724eba3304a0023b481877389f59d2a0b8e5e\": rpc error: code = NotFound desc = could not find container \"4469112f7f700d7c70443fc5589724eba3304a0023b481877389f59d2a0b8e5e\": container with ID starting with 4469112f7f700d7c70443fc5589724eba3304a0023b481877389f59d2a0b8e5e not found: ID does not exist"
Nov 30 08:22:23 crc kubenswrapper[4941]: I1130 08:22:23.538814 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" path="/var/lib/kubelet/pods/3510e7ee-c73f-4125-baf4-9eb54d1aa8ee/volumes"
Nov 30 08:22:28 crc kubenswrapper[4941]: I1130 08:22:28.793568 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Nov 30 08:22:29 crc kubenswrapper[4941]: I1130 08:22:29.112583 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" event={"ID":"14bba754-3a7e-4d3e-a017-02842a5dc338","Type":"ContainerStarted","Data":"8c7b84bab9dadd58f0a76246b8672b40d77e3efaf461df603c53fc274691bf10"}
Nov 30 08:22:29 crc kubenswrapper[4941]: I1130 08:22:29.135601 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" podStartSLOduration=1.726317114 podStartE2EDuration="12.135581157s" podCreationTimestamp="2025-11-30 08:22:17 +0000 UTC" firstStartedPulling="2025-11-30 08:22:18.381007356 +0000 UTC m=+5759.149178965" lastFinishedPulling="2025-11-30 08:22:28.790271399 +0000 UTC m=+5769.558443008" observedRunningTime="2025-11-30 08:22:29.130313394 +0000 UTC m=+5769.898485013" watchObservedRunningTime="2025-11-30 08:22:29.135581157 +0000 UTC m=+5769.903752766"
Nov 30 08:22:32 crc kubenswrapper[4941]: I1130 08:22:32.978387 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:22:32 crc kubenswrapper[4941]: I1130 08:22:32.979245 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:22:43 crc kubenswrapper[4941]: I1130 08:22:43.276443 4941 generic.go:334] "Generic (PLEG): container finished" podID="14bba754-3a7e-4d3e-a017-02842a5dc338" containerID="8c7b84bab9dadd58f0a76246b8672b40d77e3efaf461df603c53fc274691bf10" exitCode=0
Nov 30 08:22:43 crc kubenswrapper[4941]: I1130 08:22:43.276549 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" event={"ID":"14bba754-3a7e-4d3e-a017-02842a5dc338","Type":"ContainerDied","Data":"8c7b84bab9dadd58f0a76246b8672b40d77e3efaf461df603c53fc274691bf10"}
Nov 30 08:22:44 crc kubenswrapper[4941]: I1130 08:22:44.875829 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf"
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.027799 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmn6c\" (UniqueName: \"kubernetes.io/projected/14bba754-3a7e-4d3e-a017-02842a5dc338-kube-api-access-jmn6c\") pod \"14bba754-3a7e-4d3e-a017-02842a5dc338\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") "
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.027900 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-ssh-key\") pod \"14bba754-3a7e-4d3e-a017-02842a5dc338\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") "
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.027962 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-inventory\") pod \"14bba754-3a7e-4d3e-a017-02842a5dc338\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") "
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.028014 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-pre-adoption-validation-combined-ca-bundle\") pod \"14bba754-3a7e-4d3e-a017-02842a5dc338\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") "
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.028180 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-ceph\") pod \"14bba754-3a7e-4d3e-a017-02842a5dc338\" (UID: \"14bba754-3a7e-4d3e-a017-02842a5dc338\") "
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.033745 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-ceph" (OuterVolumeSpecName: "ceph") pod "14bba754-3a7e-4d3e-a017-02842a5dc338" (UID: "14bba754-3a7e-4d3e-a017-02842a5dc338"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.034510 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14bba754-3a7e-4d3e-a017-02842a5dc338-kube-api-access-jmn6c" (OuterVolumeSpecName: "kube-api-access-jmn6c") pod "14bba754-3a7e-4d3e-a017-02842a5dc338" (UID: "14bba754-3a7e-4d3e-a017-02842a5dc338"). InnerVolumeSpecName "kube-api-access-jmn6c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.036543 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "14bba754-3a7e-4d3e-a017-02842a5dc338" (UID: "14bba754-3a7e-4d3e-a017-02842a5dc338"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.058128 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "14bba754-3a7e-4d3e-a017-02842a5dc338" (UID: "14bba754-3a7e-4d3e-a017-02842a5dc338"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.070552 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-inventory" (OuterVolumeSpecName: "inventory") pod "14bba754-3a7e-4d3e-a017-02842a5dc338" (UID: "14bba754-3a7e-4d3e-a017-02842a5dc338"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.132997 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-ceph\") on node \"crc\" DevicePath \"\""
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.133062 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmn6c\" (UniqueName: \"kubernetes.io/projected/14bba754-3a7e-4d3e-a017-02842a5dc338-kube-api-access-jmn6c\") on node \"crc\" DevicePath \"\""
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.133077 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.133088 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-inventory\") on node \"crc\" DevicePath \"\""
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.133101 4941 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14bba754-3a7e-4d3e-a017-02842a5dc338-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.304913 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf" event={"ID":"14bba754-3a7e-4d3e-a017-02842a5dc338","Type":"ContainerDied","Data":"1fed94b29e898e108193ce2f4d203e8c3f3014c21192c95f7850853f5f775caf"}
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.305497 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1fed94b29e898e108193ce2f4d203e8c3f3014c21192c95f7850853f5f775caf"
Nov 30 08:22:45 crc kubenswrapper[4941]: I1130 08:22:45.305600 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf"
Nov 30 08:22:50 crc kubenswrapper[4941]: I1130 08:22:50.410995 4941 scope.go:117] "RemoveContainer" containerID="f37b2b57ff53066afc62e5cd17e46d0f37076c07384c8bc8388ae70b9f2216e6"
Nov 30 08:22:50 crc kubenswrapper[4941]: I1130 08:22:50.434827 4941 scope.go:117] "RemoveContainer" containerID="833b447cc45962121e5bfd11ecde96d19308c294322f4909cbf5452ab2cbcd43"
Nov 30 08:22:50 crc kubenswrapper[4941]: I1130 08:22:50.485635 4941 scope.go:117] "RemoveContainer" containerID="7133537b41cd56b96793f775afe54f5a08395031b92eed01c2834743b1268106"
Nov 30 08:22:52 crc kubenswrapper[4941]: I1130 08:22:52.051961 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-3264-account-create-update-m2jhl"]
Nov 30 08:22:52 crc kubenswrapper[4941]: I1130 08:22:52.064553 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-cqttg"]
Nov 30 08:22:52 crc kubenswrapper[4941]: I1130 08:22:52.072906 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-3264-account-create-update-m2jhl"]
Nov 30 08:22:52 crc kubenswrapper[4941]: I1130 08:22:52.080671 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-cqttg"]
Nov 30 08:22:53 crc kubenswrapper[4941]: I1130 08:22:53.563390 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6eb8b92f-01d2-4330-b30e-36310de649ea" path="/var/lib/kubelet/pods/6eb8b92f-01d2-4330-b30e-36310de649ea/volumes"
Nov 30 08:22:53 crc kubenswrapper[4941]: I1130 08:22:53.564723 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92489e01-470b-476d-8f67-07e883792c74" path="/var/lib/kubelet/pods/92489e01-470b-476d-8f67-07e883792c74/volumes"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.442006 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"]
Nov 30 08:22:55 crc kubenswrapper[4941]: E1130 08:22:55.443662 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" containerName="init"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.443760 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" containerName="init"
Nov 30 08:22:55 crc kubenswrapper[4941]: E1130 08:22:55.443869 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14bba754-3a7e-4d3e-a017-02842a5dc338" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.443949 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="14bba754-3a7e-4d3e-a017-02842a5dc338" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1"
Nov 30 08:22:55 crc kubenswrapper[4941]: E1130 08:22:55.444050 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" containerName="dnsmasq-dns"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.444122 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" containerName="dnsmasq-dns"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.444471 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3510e7ee-c73f-4125-baf4-9eb54d1aa8ee" containerName="dnsmasq-dns"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.444586 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="14bba754-3a7e-4d3e-a017-02842a5dc338" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.445679 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.448170 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.448233 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.449713 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.450071 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.457301 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"]
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.507888 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.508471 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.508526 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9gqs\" (UniqueName: \"kubernetes.io/projected/941a2803-c15d-4bd4-8348-e4cb3fd11d55-kube-api-access-c9gqs\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.508802 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.508921 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.611866 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.613152 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.613244 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9gqs\" (UniqueName: \"kubernetes.io/projected/941a2803-c15d-4bd4-8348-e4cb3fd11d55-kube-api-access-c9gqs\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.613321 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.613382 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.621621 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.621918 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.621990 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.630987 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.642237 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9gqs\" (UniqueName: \"kubernetes.io/projected/941a2803-c15d-4bd4-8348-e4cb3fd11d55-kube-api-access-c9gqs\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:55 crc kubenswrapper[4941]: I1130 08:22:55.776906 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"
Nov 30 08:22:56 crc kubenswrapper[4941]: I1130 08:22:56.399717 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc"]
Nov 30 08:22:56 crc kubenswrapper[4941]: I1130 08:22:56.423452 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc" event={"ID":"941a2803-c15d-4bd4-8348-e4cb3fd11d55","Type":"ContainerStarted","Data":"15ee482079d4e9387cd63158d8ac13889500f0de17bce2d9e23054c2f7e151c9"}
Nov 30 08:22:57 crc kubenswrapper[4941]: I1130 08:22:57.434365 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc" event={"ID":"941a2803-c15d-4bd4-8348-e4cb3fd11d55","Type":"ContainerStarted","Data":"9afc48fb264f584aac1105144201b52f38b9cdd4abc4cd4be9a712904820c328"}
Nov 30 08:22:57 crc kubenswrapper[4941]: I1130 08:22:57.459615 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc" podStartSLOduration=1.991680109 podStartE2EDuration="2.45959322s" podCreationTimestamp="2025-11-30 08:22:55 +0000 UTC" firstStartedPulling="2025-11-30 08:22:56.398780117 +0000 UTC m=+5797.166951746" lastFinishedPulling="2025-11-30 08:22:56.866693248 +0000 UTC m=+5797.634864857" observedRunningTime="2025-11-30 08:22:57.450073425 +0000 UTC m=+5798.218245034" watchObservedRunningTime="2025-11-30 08:22:57.45959322 +0000 UTC m=+5798.227764829"
Nov 30 08:23:02 crc kubenswrapper[4941]: I1130 08:23:02.979079 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:23:02 crc kubenswrapper[4941]: I1130 08:23:02.980351 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:23:02 crc kubenswrapper[4941]: I1130 08:23:02.980438 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 08:23:02 crc kubenswrapper[4941]: I1130 08:23:02.981583 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b2b0ac757b4b509277e111501824da6d25da3cf0f9b1a7aee73120ac3b723944"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 08:23:02 crc kubenswrapper[4941]: I1130 08:23:02.981719 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://b2b0ac757b4b509277e111501824da6d25da3cf0f9b1a7aee73120ac3b723944" gracePeriod=600
Nov 30 08:23:03 crc kubenswrapper[4941]: I1130 08:23:03.594873 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="b2b0ac757b4b509277e111501824da6d25da3cf0f9b1a7aee73120ac3b723944" exitCode=0
Nov 30 08:23:03 crc kubenswrapper[4941]: I1130 08:23:03.595060 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"b2b0ac757b4b509277e111501824da6d25da3cf0f9b1a7aee73120ac3b723944"}
Nov 30 08:23:03 crc kubenswrapper[4941]: I1130 08:23:03.595384 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890"}
Nov 30 08:23:03 crc kubenswrapper[4941]: I1130 08:23:03.595425 4941 scope.go:117] "RemoveContainer" containerID="5b2b89210439f329efc2db095cbdcdf3533f523b0214bce1519507cf28eb2d9f"
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.187977 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tz9jh"]
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.193182 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.213048 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tz9jh"]
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.337594 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkdmw\" (UniqueName: \"kubernetes.io/projected/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-kube-api-access-vkdmw\") pod \"certified-operators-tz9jh\" (UID: \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\") " pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.338071 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-catalog-content\") pod \"certified-operators-tz9jh\" (UID: \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\") " pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.338427 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-utilities\") pod \"certified-operators-tz9jh\" (UID: \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\") " pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.446703 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-utilities\") pod \"certified-operators-tz9jh\" (UID: \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\") " pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.447671 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-utilities\") pod \"certified-operators-tz9jh\" (UID: \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\") " pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.447866 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkdmw\" (UniqueName: \"kubernetes.io/projected/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-kube-api-access-vkdmw\") pod \"certified-operators-tz9jh\" (UID: \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\") " pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.448060 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-catalog-content\") pod \"certified-operators-tz9jh\" (UID: \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\") " pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.449988 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-catalog-content\") pod \"certified-operators-tz9jh\" (UID: \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\") " pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.480053 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkdmw\" (UniqueName: \"kubernetes.io/projected/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-kube-api-access-vkdmw\") pod \"certified-operators-tz9jh\" (UID: \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\") " pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:16 crc kubenswrapper[4941]: I1130 08:23:16.551978 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:17 crc kubenswrapper[4941]: I1130 08:23:17.091053 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tz9jh"]
Nov 30 08:23:17 crc kubenswrapper[4941]: W1130 08:23:17.093379 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod873eb4d2_4947_4fa9_b0d0_4842fb5031ed.slice/crio-b03c00868c2096c1b0252a24634ae5290d6b524396a111faf909b1ee1bc6ff91 WatchSource:0}: Error finding container b03c00868c2096c1b0252a24634ae5290d6b524396a111faf909b1ee1bc6ff91: Status 404 returned error can't find the container with id b03c00868c2096c1b0252a24634ae5290d6b524396a111faf909b1ee1bc6ff91
Nov 30 08:23:17 crc kubenswrapper[4941]: I1130 08:23:17.817681 4941 generic.go:334] "Generic (PLEG): container finished" podID="873eb4d2-4947-4fa9-b0d0-4842fb5031ed" containerID="962dc6c533412654a119211fab8d8c496eee4efc70320fa264694bcc88de382a" exitCode=0
Nov 30 08:23:17 crc kubenswrapper[4941]: I1130 08:23:17.817752 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tz9jh" event={"ID":"873eb4d2-4947-4fa9-b0d0-4842fb5031ed","Type":"ContainerDied","Data":"962dc6c533412654a119211fab8d8c496eee4efc70320fa264694bcc88de382a"}
Nov 30 08:23:17 crc kubenswrapper[4941]: I1130 08:23:17.818609 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tz9jh" event={"ID":"873eb4d2-4947-4fa9-b0d0-4842fb5031ed","Type":"ContainerStarted","Data":"b03c00868c2096c1b0252a24634ae5290d6b524396a111faf909b1ee1bc6ff91"}
Nov 30 08:23:18 crc kubenswrapper[4941]: I1130 08:23:18.838476 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tz9jh" event={"ID":"873eb4d2-4947-4fa9-b0d0-4842fb5031ed","Type":"ContainerStarted","Data":"c44df1924102c8afab7f3d1efe350e7b0170b6314749ef72b94e3f3e37629ba4"}
Nov 30 08:23:19 crc kubenswrapper[4941]: I1130 08:23:19.852633 4941 generic.go:334] "Generic (PLEG): container finished" podID="873eb4d2-4947-4fa9-b0d0-4842fb5031ed" containerID="c44df1924102c8afab7f3d1efe350e7b0170b6314749ef72b94e3f3e37629ba4" exitCode=0
Nov 30 08:23:19 crc kubenswrapper[4941]: I1130 08:23:19.852695 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tz9jh" event={"ID":"873eb4d2-4947-4fa9-b0d0-4842fb5031ed","Type":"ContainerDied","Data":"c44df1924102c8afab7f3d1efe350e7b0170b6314749ef72b94e3f3e37629ba4"}
Nov 30 08:23:20 crc kubenswrapper[4941]: I1130 08:23:20.882957 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tz9jh" event={"ID":"873eb4d2-4947-4fa9-b0d0-4842fb5031ed","Type":"ContainerStarted","Data":"c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420"}
Nov 30 08:23:20 crc kubenswrapper[4941]: I1130 08:23:20.913696 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tz9jh" podStartSLOduration=2.337730937 podStartE2EDuration="4.913670534s" podCreationTimestamp="2025-11-30 08:23:16 +0000 UTC" firstStartedPulling="2025-11-30 08:23:17.820641193 +0000 UTC m=+5818.588812822" lastFinishedPulling="2025-11-30 08:23:20.39658081 +0000 UTC m=+5821.164752419" observedRunningTime="2025-11-30 08:23:20.908622048 +0000 UTC m=+5821.676793657" watchObservedRunningTime="2025-11-30 08:23:20.913670534 +0000 UTC m=+5821.681842143"
Nov 30 08:23:26 crc kubenswrapper[4941]: I1130 08:23:26.553079 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:26 crc kubenswrapper[4941]: I1130 08:23:26.553824 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:26 crc kubenswrapper[4941]: I1130 08:23:26.622404 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:27 crc kubenswrapper[4941]: I1130 08:23:27.006014 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:28 crc kubenswrapper[4941]: I1130 08:23:28.995507 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tz9jh"]
Nov 30 08:23:28 crc kubenswrapper[4941]: I1130 08:23:28.999532 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tz9jh" podUID="873eb4d2-4947-4fa9-b0d0-4842fb5031ed" containerName="registry-server" containerID="cri-o://c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420" gracePeriod=2
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.584864 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.691678 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-utilities\") pod \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\" (UID: \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\") "
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.691881 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-catalog-content\") pod \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\" (UID: \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\") "
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.691936 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkdmw\" (UniqueName: \"kubernetes.io/projected/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-kube-api-access-vkdmw\") pod \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\" (UID: \"873eb4d2-4947-4fa9-b0d0-4842fb5031ed\") "
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.692635 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-utilities" (OuterVolumeSpecName: "utilities") pod "873eb4d2-4947-4fa9-b0d0-4842fb5031ed" (UID: "873eb4d2-4947-4fa9-b0d0-4842fb5031ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.700176 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-kube-api-access-vkdmw" (OuterVolumeSpecName: "kube-api-access-vkdmw") pod "873eb4d2-4947-4fa9-b0d0-4842fb5031ed" (UID: "873eb4d2-4947-4fa9-b0d0-4842fb5031ed"). InnerVolumeSpecName "kube-api-access-vkdmw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.748910 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "873eb4d2-4947-4fa9-b0d0-4842fb5031ed" (UID: "873eb4d2-4947-4fa9-b0d0-4842fb5031ed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.795671 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.796118 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkdmw\" (UniqueName: \"kubernetes.io/projected/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-kube-api-access-vkdmw\") on node \"crc\" DevicePath \"\""
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.796134 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/873eb4d2-4947-4fa9-b0d0-4842fb5031ed-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.988079 4941 generic.go:334] "Generic (PLEG): container finished" podID="873eb4d2-4947-4fa9-b0d0-4842fb5031ed" containerID="c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420" exitCode=0
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.988158 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tz9jh" event={"ID":"873eb4d2-4947-4fa9-b0d0-4842fb5031ed","Type":"ContainerDied","Data":"c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420"}
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.988208 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tz9jh" event={"ID":"873eb4d2-4947-4fa9-b0d0-4842fb5031ed","Type":"ContainerDied","Data":"b03c00868c2096c1b0252a24634ae5290d6b524396a111faf909b1ee1bc6ff91"}
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.988240 4941 scope.go:117] "RemoveContainer" containerID="c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420"
Nov 30 08:23:29 crc kubenswrapper[4941]: I1130 08:23:29.988499 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tz9jh"
Nov 30 08:23:30 crc kubenswrapper[4941]: I1130 08:23:30.032806 4941 scope.go:117] "RemoveContainer" containerID="c44df1924102c8afab7f3d1efe350e7b0170b6314749ef72b94e3f3e37629ba4"
Nov 30 08:23:30 crc kubenswrapper[4941]: I1130 08:23:30.051278 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tz9jh"]
Nov 30 08:23:30 crc kubenswrapper[4941]: I1130 08:23:30.064497 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tz9jh"]
Nov 30 08:23:30 crc kubenswrapper[4941]: I1130 08:23:30.080240 4941 scope.go:117] "RemoveContainer" containerID="962dc6c533412654a119211fab8d8c496eee4efc70320fa264694bcc88de382a"
Nov 30 08:23:30 crc kubenswrapper[4941]: I1130 08:23:30.137683 4941 scope.go:117] "RemoveContainer" containerID="c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420"
Nov 30 08:23:30 crc kubenswrapper[4941]: E1130 08:23:30.147689 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420\": container with ID starting with c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420 not found: ID does not exist" containerID="c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420"
Nov 30 08:23:30 crc kubenswrapper[4941]: I1130 08:23:30.147758 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420"} err="failed to get container status \"c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420\": rpc error: code = NotFound desc = could not find container \"c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420\": container with ID starting with c5c612ce07426d867ecdc069933d49d636928ad91cf505d17673d69baefed420 not found: ID does not exist"
Nov 30 08:23:30 crc kubenswrapper[4941]: I1130 08:23:30.147791 4941 scope.go:117] "RemoveContainer" containerID="c44df1924102c8afab7f3d1efe350e7b0170b6314749ef72b94e3f3e37629ba4"
Nov 30 08:23:30 crc kubenswrapper[4941]: E1130 08:23:30.151901 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c44df1924102c8afab7f3d1efe350e7b0170b6314749ef72b94e3f3e37629ba4\": container with ID starting with c44df1924102c8afab7f3d1efe350e7b0170b6314749ef72b94e3f3e37629ba4 not found: ID does not exist" containerID="c44df1924102c8afab7f3d1efe350e7b0170b6314749ef72b94e3f3e37629ba4"
Nov 30 08:23:30 crc kubenswrapper[4941]: I1130 08:23:30.152250 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c44df1924102c8afab7f3d1efe350e7b0170b6314749ef72b94e3f3e37629ba4"} err="failed to get container status \"c44df1924102c8afab7f3d1efe350e7b0170b6314749ef72b94e3f3e37629ba4\": rpc error: code = NotFound desc = could not find container \"c44df1924102c8afab7f3d1efe350e7b0170b6314749ef72b94e3f3e37629ba4\": container with ID starting with c44df1924102c8afab7f3d1efe350e7b0170b6314749ef72b94e3f3e37629ba4 not found: ID does not exist"
Nov 30 08:23:30 crc kubenswrapper[4941]: I1130 08:23:30.152393 4941 scope.go:117] "RemoveContainer" containerID="962dc6c533412654a119211fab8d8c496eee4efc70320fa264694bcc88de382a"
Nov 30 08:23:30 crc kubenswrapper[4941]: E1130 08:23:30.153417 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"962dc6c533412654a119211fab8d8c496eee4efc70320fa264694bcc88de382a\": container with ID starting with 962dc6c533412654a119211fab8d8c496eee4efc70320fa264694bcc88de382a not found: ID does not exist" containerID="962dc6c533412654a119211fab8d8c496eee4efc70320fa264694bcc88de382a"
Nov 30 08:23:30 crc kubenswrapper[4941]: I1130 08:23:30.153455 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"962dc6c533412654a119211fab8d8c496eee4efc70320fa264694bcc88de382a"} err="failed to get container status \"962dc6c533412654a119211fab8d8c496eee4efc70320fa264694bcc88de382a\": rpc error: code = NotFound desc = could not find container \"962dc6c533412654a119211fab8d8c496eee4efc70320fa264694bcc88de382a\": container with ID starting with 962dc6c533412654a119211fab8d8c496eee4efc70320fa264694bcc88de382a not found: ID does not exist"
Nov 30 08:23:31 crc kubenswrapper[4941]: I1130 08:23:31.551888 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="873eb4d2-4947-4fa9-b0d0-4842fb5031ed" path="/var/lib/kubelet/pods/873eb4d2-4947-4fa9-b0d0-4842fb5031ed/volumes"
Nov 30 08:23:36 crc kubenswrapper[4941]: I1130 08:23:36.046914 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-jnlt5"]
Nov 30 08:23:36 crc kubenswrapper[4941]: I1130 08:23:36.057166 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-jnlt5"]
Nov 30 08:23:37 crc kubenswrapper[4941]: I1130 08:23:37.535654 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22cf5e10-17a3-4530-9caf-cfa2f2b697bf" path="/var/lib/kubelet/pods/22cf5e10-17a3-4530-9caf-cfa2f2b697bf/volumes"
Nov 30 08:23:50 crc kubenswrapper[4941]: I1130 08:23:50.775601 4941 scope.go:117] "RemoveContainer" containerID="1236390d21008415dabca0d67703784dc11afbda5295bf3ab6d81e0cba6b1e68"
Nov 30 08:23:50 crc kubenswrapper[4941]: I1130 08:23:50.807596 4941 scope.go:117] "RemoveContainer" containerID="563b508e086b662d6201afd4dbec37e76089d0dda8721b275abf1157af9545b2"
Nov 30 08:23:50 crc kubenswrapper[4941]: I1130 08:23:50.933727 4941 scope.go:117] "RemoveContainer" containerID="045f18d50926302208689fee6ebf15ec7fa81f34795c48723e981361d7598d48"
Nov 30 08:25:32 crc kubenswrapper[4941]: I1130 08:25:32.978383 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:25:32 crc kubenswrapper[4941]: I1130 08:25:32.979167 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:26:02 crc kubenswrapper[4941]: I1130 08:26:02.978985 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:26:02 crc kubenswrapper[4941]: I1130 08:26:02.979846 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:26:32 crc kubenswrapper[4941]: I1130 08:26:32.980659 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:26:32 crc kubenswrapper[4941]: I1130 08:26:32.981434 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:26:32 crc kubenswrapper[4941]: I1130 08:26:32.981496 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 08:26:32 crc kubenswrapper[4941]: I1130 08:26:32.982580 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 08:26:32 crc kubenswrapper[4941]: I1130 08:26:32.982654 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" gracePeriod=600
Nov 30 08:26:33 crc kubenswrapper[4941]: E1130 08:26:33.109077 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:26:33 crc kubenswrapper[4941]: I1130 08:26:33.750373 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" exitCode=0
Nov 30 08:26:33 crc kubenswrapper[4941]: I1130 08:26:33.750420 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890"}
Nov 30 08:26:33 crc kubenswrapper[4941]: I1130 08:26:33.750457 4941 scope.go:117] "RemoveContainer" containerID="b2b0ac757b4b509277e111501824da6d25da3cf0f9b1a7aee73120ac3b723944"
Nov 30 08:26:33 crc kubenswrapper[4941]: I1130 08:26:33.751520 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890"
Nov 30 08:26:33 crc kubenswrapper[4941]: E1130 08:26:33.751948 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:26:47 crc kubenswrapper[4941]: I1130 08:26:47.522223 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890"
Nov 30 08:26:47 crc kubenswrapper[4941]: E1130 08:26:47.523425 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:26:58 crc kubenswrapper[4941]: I1130 08:26:58.522361 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890"
Nov 30 08:26:58 crc kubenswrapper[4941]: E1130 08:26:58.523400 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:27:10 crc kubenswrapper[4941]: I1130 08:27:10.523135 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890"
Nov 30 08:27:10 crc kubenswrapper[4941]: E1130 08:27:10.524269 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:27:23 crc kubenswrapper[4941]: I1130 08:27:23.521781 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890"
Nov 30 08:27:23 crc kubenswrapper[4941]: E1130 08:27:23.523200 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:27:31 crc kubenswrapper[4941]: I1130 08:27:31.056215 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cdae-account-create-update-n7gzv"]
Nov 30 08:27:31 crc kubenswrapper[4941]: I1130 08:27:31.074747 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-g84q2"]
Nov 30 08:27:31 crc kubenswrapper[4941]: I1130 08:27:31.087250 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cdae-account-create-update-n7gzv"]
Nov 30 08:27:31 crc kubenswrapper[4941]: I1130 08:27:31.101172 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-g84q2"]
Nov 30 08:27:31 crc kubenswrapper[4941]: I1130 08:27:31.545232 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f" path="/var/lib/kubelet/pods/7900951c-1fdb-49aa-b9ac-f5bc9f3edd7f/volumes"
Nov 30 08:27:31 crc kubenswrapper[4941]: I1130 08:27:31.547156 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d098d395-0a98-4ba6-97eb-8422ead8b53e" path="/var/lib/kubelet/pods/d098d395-0a98-4ba6-97eb-8422ead8b53e/volumes"
Nov 30 08:27:36 crc kubenswrapper[4941]: I1130 08:27:36.523089 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890"
Nov 30 08:27:36 crc kubenswrapper[4941]: E1130 08:27:36.524602 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:27:47 crc kubenswrapper[4941]: I1130 08:27:47.043246 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-58z6d"]
Nov 30 08:27:47 crc kubenswrapper[4941]: I1130 08:27:47.061639 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-58z6d"]
Nov 30 08:27:47 crc kubenswrapper[4941]: I1130 08:27:47.539908 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d" path="/var/lib/kubelet/pods/30bf8bc5-e669-4a3b-a037-d8ea9a4ef09d/volumes"
Nov 30 08:27:49 crc kubenswrapper[4941]: I1130 08:27:49.533395 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890"
Nov 30 08:27:49 crc kubenswrapper[4941]: E1130 08:27:49.534145 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:27:51 crc kubenswrapper[4941]: I1130 08:27:51.159189 4941 scope.go:117] "RemoveContainer" containerID="45879672938cf8dff60d6053339269fa0550b37eafd1c095caee1c43b98064c2"
Nov 30 08:27:51 crc kubenswrapper[4941]: I1130 08:27:51.218983 4941 scope.go:117] "RemoveContainer" containerID="05d5f83f3012558b13c3d5a36900613ebfed7792e612b64f086c7129fcbbd333"
Nov 30 08:27:51 crc kubenswrapper[4941]: I1130 08:27:51.252768 4941 scope.go:117] "RemoveContainer" containerID="560645d6202a800a1d00eb338903074a980d47501e5f6701b63dc0cfcd645fe1"
Nov 30 08:27:51 crc kubenswrapper[4941]: I1130 08:27:51.294478 4941 scope.go:117] "RemoveContainer" containerID="3a24397659f2d5ca117389c006c669b840dca3c9f1351fdefc6ea3ef6ae9f2c8"
Nov 30 08:27:51 crc kubenswrapper[4941]: I1130 08:27:51.334372 4941 scope.go:117] "RemoveContainer" containerID="b2659ddb03c0b4a8892ebea7de61ac31aba1fa4ef90aa953a37b961938d0b571"
Nov 30 08:27:51 crc kubenswrapper[4941]: I1130 08:27:51.394579 4941 scope.go:117] "RemoveContainer" containerID="71bde4bcdb52219de56f0629ce04d0747bec6a7055739c84cf8b409466d8c12f"
Nov 30 08:28:01 crc kubenswrapper[4941]: I1130 08:28:01.523049 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890"
Nov 30 08:28:01 crc kubenswrapper[4941]: E1130 08:28:01.524564 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.555632 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h2k2t"]
Nov 30 08:28:04 crc kubenswrapper[4941]: E1130 08:28:04.559245 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="873eb4d2-4947-4fa9-b0d0-4842fb5031ed" containerName="extract-content"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.559269 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="873eb4d2-4947-4fa9-b0d0-4842fb5031ed" containerName="extract-content"
Nov 30 08:28:04 crc kubenswrapper[4941]: E1130 08:28:04.559317 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="873eb4d2-4947-4fa9-b0d0-4842fb5031ed" containerName="registry-server"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.559339 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="873eb4d2-4947-4fa9-b0d0-4842fb5031ed" containerName="registry-server"
Nov 30 08:28:04 crc kubenswrapper[4941]: E1130 08:28:04.559356 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="873eb4d2-4947-4fa9-b0d0-4842fb5031ed" containerName="extract-utilities"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.559363 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="873eb4d2-4947-4fa9-b0d0-4842fb5031ed" containerName="extract-utilities"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.559614 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="873eb4d2-4947-4fa9-b0d0-4842fb5031ed" containerName="registry-server"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.561375 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.597858 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h2k2t"]
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.698962 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff48638b-afaf-4d79-857f-ea8509d632da-catalog-content\") pod \"redhat-marketplace-h2k2t\" (UID: \"ff48638b-afaf-4d79-857f-ea8509d632da\") " pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.699027 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxbpr\" (UniqueName: \"kubernetes.io/projected/ff48638b-afaf-4d79-857f-ea8509d632da-kube-api-access-zxbpr\") pod \"redhat-marketplace-h2k2t\" (UID: \"ff48638b-afaf-4d79-857f-ea8509d632da\") " pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.699148 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff48638b-afaf-4d79-857f-ea8509d632da-utilities\") pod \"redhat-marketplace-h2k2t\" (UID: \"ff48638b-afaf-4d79-857f-ea8509d632da\") " pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.801496 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff48638b-afaf-4d79-857f-ea8509d632da-catalog-content\") pod \"redhat-marketplace-h2k2t\" (UID: \"ff48638b-afaf-4d79-857f-ea8509d632da\") " pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.801587 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxbpr\" (UniqueName: \"kubernetes.io/projected/ff48638b-afaf-4d79-857f-ea8509d632da-kube-api-access-zxbpr\") pod \"redhat-marketplace-h2k2t\" (UID: \"ff48638b-afaf-4d79-857f-ea8509d632da\") " pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.801700 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff48638b-afaf-4d79-857f-ea8509d632da-utilities\") pod \"redhat-marketplace-h2k2t\" (UID: \"ff48638b-afaf-4d79-857f-ea8509d632da\") " pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.802514 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff48638b-afaf-4d79-857f-ea8509d632da-utilities\") pod \"redhat-marketplace-h2k2t\" (UID: \"ff48638b-afaf-4d79-857f-ea8509d632da\") " pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.802998 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff48638b-afaf-4d79-857f-ea8509d632da-catalog-content\") pod \"redhat-marketplace-h2k2t\" (UID: \"ff48638b-afaf-4d79-857f-ea8509d632da\") " pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.828121 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxbpr\" (UniqueName: \"kubernetes.io/projected/ff48638b-afaf-4d79-857f-ea8509d632da-kube-api-access-zxbpr\") pod \"redhat-marketplace-h2k2t\" (UID: \"ff48638b-afaf-4d79-857f-ea8509d632da\") " pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:04 crc kubenswrapper[4941]: I1130 08:28:04.882560 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:05 crc kubenswrapper[4941]: I1130 08:28:05.453903 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h2k2t"]
Nov 30 08:28:05 crc kubenswrapper[4941]: I1130 08:28:05.949267 4941 generic.go:334] "Generic (PLEG): container finished" podID="ff48638b-afaf-4d79-857f-ea8509d632da" containerID="ba211eb46a7e3cda695db93856c566abdb103c30b412c31ee1a444b2c9f64d1d" exitCode=0
Nov 30 08:28:05 crc kubenswrapper[4941]: I1130 08:28:05.950699 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h2k2t" event={"ID":"ff48638b-afaf-4d79-857f-ea8509d632da","Type":"ContainerDied","Data":"ba211eb46a7e3cda695db93856c566abdb103c30b412c31ee1a444b2c9f64d1d"}
Nov 30 08:28:05 crc kubenswrapper[4941]: I1130 08:28:05.950861 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h2k2t" event={"ID":"ff48638b-afaf-4d79-857f-ea8509d632da","Type":"ContainerStarted","Data":"6c33c69016bb226cee401bfcb7db9e0778c447ee05ac1881ed895433a7150b0f"}
Nov 30 08:28:05 crc kubenswrapper[4941]: I1130 08:28:05.953051 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 30 08:28:06 crc kubenswrapper[4941]: I1130 08:28:06.968385 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h2k2t" event={"ID":"ff48638b-afaf-4d79-857f-ea8509d632da","Type":"ContainerStarted","Data":"58402b58f92d8814986512304348c1d828129acadeab584a9b8926286cf93bfe"}
Nov 30 08:28:07 crc kubenswrapper[4941]: I1130 08:28:07.985429 4941 generic.go:334] "Generic (PLEG): container finished" podID="ff48638b-afaf-4d79-857f-ea8509d632da" containerID="58402b58f92d8814986512304348c1d828129acadeab584a9b8926286cf93bfe" exitCode=0
Nov 30 08:28:07 crc kubenswrapper[4941]: I1130 08:28:07.985971 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h2k2t" event={"ID":"ff48638b-afaf-4d79-857f-ea8509d632da","Type":"ContainerDied","Data":"58402b58f92d8814986512304348c1d828129acadeab584a9b8926286cf93bfe"}
Nov 30 08:28:09 crc kubenswrapper[4941]: I1130 08:28:08.999364 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h2k2t" event={"ID":"ff48638b-afaf-4d79-857f-ea8509d632da","Type":"ContainerStarted","Data":"4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73"}
Nov 30 08:28:09 crc kubenswrapper[4941]: I1130 08:28:09.028613 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h2k2t" podStartSLOduration=2.421191923 podStartE2EDuration="5.028582942s" podCreationTimestamp="2025-11-30 08:28:04 +0000 UTC" firstStartedPulling="2025-11-30 08:28:05.952774065 +0000 UTC m=+6106.720945664" lastFinishedPulling="2025-11-30 08:28:08.560164984 +0000 UTC m=+6109.328336683" observedRunningTime="2025-11-30 08:28:09.018050757 +0000 UTC m=+6109.786222416" watchObservedRunningTime="2025-11-30 08:28:09.028582942 +0000 UTC m=+6109.796754551"
Nov 30 08:28:14 crc kubenswrapper[4941]: I1130 08:28:14.884199 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:14 crc kubenswrapper[4941]: I1130 08:28:14.885287 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:14 crc kubenswrapper[4941]: I1130 08:28:14.973622 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:15 crc kubenswrapper[4941]: I1130 08:28:15.130174 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:15 crc kubenswrapper[4941]: I1130 08:28:15.521777 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890"
Nov 30 08:28:15 crc kubenswrapper[4941]: E1130 08:28:15.522097 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:28:18 crc kubenswrapper[4941]: I1130 08:28:18.557058 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h2k2t"]
Nov 30 08:28:18 crc kubenswrapper[4941]: I1130 08:28:18.558548 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-h2k2t" podUID="ff48638b-afaf-4d79-857f-ea8509d632da" containerName="registry-server" containerID="cri-o://4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73" gracePeriod=2
Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.075681 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h2k2t"
Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.161345 4941 generic.go:334] "Generic (PLEG): container finished" podID="ff48638b-afaf-4d79-857f-ea8509d632da" containerID="4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73" exitCode=0
Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.161406 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h2k2t" event={"ID":"ff48638b-afaf-4d79-857f-ea8509d632da","Type":"ContainerDied","Data":"4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73"}
Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.161445 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h2k2t" event={"ID":"ff48638b-afaf-4d79-857f-ea8509d632da","Type":"ContainerDied","Data":"6c33c69016bb226cee401bfcb7db9e0778c447ee05ac1881ed895433a7150b0f"}
Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.161467 4941 scope.go:117] "RemoveContainer" containerID="4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73"
Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.161636 4941 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h2k2t" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.194244 4941 scope.go:117] "RemoveContainer" containerID="58402b58f92d8814986512304348c1d828129acadeab584a9b8926286cf93bfe" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.208482 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxbpr\" (UniqueName: \"kubernetes.io/projected/ff48638b-afaf-4d79-857f-ea8509d632da-kube-api-access-zxbpr\") pod \"ff48638b-afaf-4d79-857f-ea8509d632da\" (UID: \"ff48638b-afaf-4d79-857f-ea8509d632da\") " Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.208734 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff48638b-afaf-4d79-857f-ea8509d632da-catalog-content\") pod \"ff48638b-afaf-4d79-857f-ea8509d632da\" (UID: \"ff48638b-afaf-4d79-857f-ea8509d632da\") " Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.209186 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff48638b-afaf-4d79-857f-ea8509d632da-utilities\") pod \"ff48638b-afaf-4d79-857f-ea8509d632da\" (UID: \"ff48638b-afaf-4d79-857f-ea8509d632da\") " Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.211191 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff48638b-afaf-4d79-857f-ea8509d632da-utilities" (OuterVolumeSpecName: "utilities") pod "ff48638b-afaf-4d79-857f-ea8509d632da" (UID: "ff48638b-afaf-4d79-857f-ea8509d632da"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.221379 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff48638b-afaf-4d79-857f-ea8509d632da-kube-api-access-zxbpr" (OuterVolumeSpecName: "kube-api-access-zxbpr") pod "ff48638b-afaf-4d79-857f-ea8509d632da" (UID: "ff48638b-afaf-4d79-857f-ea8509d632da"). InnerVolumeSpecName "kube-api-access-zxbpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.232867 4941 scope.go:117] "RemoveContainer" containerID="ba211eb46a7e3cda695db93856c566abdb103c30b412c31ee1a444b2c9f64d1d" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.234642 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff48638b-afaf-4d79-857f-ea8509d632da-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ff48638b-afaf-4d79-857f-ea8509d632da" (UID: "ff48638b-afaf-4d79-857f-ea8509d632da"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.312057 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff48638b-afaf-4d79-857f-ea8509d632da-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.312110 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff48638b-afaf-4d79-857f-ea8509d632da-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.312121 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxbpr\" (UniqueName: \"kubernetes.io/projected/ff48638b-afaf-4d79-857f-ea8509d632da-kube-api-access-zxbpr\") on node \"crc\" DevicePath \"\"" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.314214 4941 scope.go:117] "RemoveContainer" containerID="4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73" Nov 30 08:28:19 crc kubenswrapper[4941]: E1130 08:28:19.314867 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73\": container with ID starting with 4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73 not found: ID does not exist" containerID="4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.314930 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73"} err="failed to get container status \"4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73\": rpc error: code = NotFound desc = could not find container \"4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73\": container with ID starting with 4de27ed44735edbfa3aca248dd0a2c6b133c03bb0eda73d611a7b940b49a9c73 not found: ID does not exist" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.314972 4941 scope.go:117] "RemoveContainer" containerID="58402b58f92d8814986512304348c1d828129acadeab584a9b8926286cf93bfe" Nov 30 08:28:19 crc kubenswrapper[4941]: E1130 08:28:19.315629 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58402b58f92d8814986512304348c1d828129acadeab584a9b8926286cf93bfe\": container with ID starting with 58402b58f92d8814986512304348c1d828129acadeab584a9b8926286cf93bfe not found: ID does not exist" containerID="58402b58f92d8814986512304348c1d828129acadeab584a9b8926286cf93bfe" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.315690 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58402b58f92d8814986512304348c1d828129acadeab584a9b8926286cf93bfe"} err="failed to get container status \"58402b58f92d8814986512304348c1d828129acadeab584a9b8926286cf93bfe\": rpc error: code = NotFound desc = could not find container \"58402b58f92d8814986512304348c1d828129acadeab584a9b8926286cf93bfe\": container with ID starting with 58402b58f92d8814986512304348c1d828129acadeab584a9b8926286cf93bfe not found: ID does not exist" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.315742 4941 scope.go:117] "RemoveContainer" containerID="ba211eb46a7e3cda695db93856c566abdb103c30b412c31ee1a444b2c9f64d1d" Nov 30 08:28:19 crc 
kubenswrapper[4941]: E1130 08:28:19.316105 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba211eb46a7e3cda695db93856c566abdb103c30b412c31ee1a444b2c9f64d1d\": container with ID starting with ba211eb46a7e3cda695db93856c566abdb103c30b412c31ee1a444b2c9f64d1d not found: ID does not exist" containerID="ba211eb46a7e3cda695db93856c566abdb103c30b412c31ee1a444b2c9f64d1d" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.316131 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba211eb46a7e3cda695db93856c566abdb103c30b412c31ee1a444b2c9f64d1d"} err="failed to get container status \"ba211eb46a7e3cda695db93856c566abdb103c30b412c31ee1a444b2c9f64d1d\": rpc error: code = NotFound desc = could not find container \"ba211eb46a7e3cda695db93856c566abdb103c30b412c31ee1a444b2c9f64d1d\": container with ID starting with ba211eb46a7e3cda695db93856c566abdb103c30b412c31ee1a444b2c9f64d1d not found: ID does not exist" Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.544441 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h2k2t"] Nov 30 08:28:19 crc kubenswrapper[4941]: I1130 08:28:19.544517 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-h2k2t"] Nov 30 08:28:19 crc kubenswrapper[4941]: E1130 08:28:19.684736 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff48638b_afaf_4d79_857f_ea8509d632da.slice\": RecentStats: unable to find data in memory cache]" Nov 30 08:28:21 crc kubenswrapper[4941]: I1130 08:28:21.548098 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff48638b-afaf-4d79-857f-ea8509d632da" path="/var/lib/kubelet/pods/ff48638b-afaf-4d79-857f-ea8509d632da/volumes" Nov 30 08:28:29 crc kubenswrapper[4941]: I1130 08:28:29.544854 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:28:29 crc kubenswrapper[4941]: E1130 08:28:29.546963 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:28:44 crc kubenswrapper[4941]: I1130 08:28:44.523173 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:28:44 crc kubenswrapper[4941]: E1130 08:28:44.525132 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:28:56 crc kubenswrapper[4941]: I1130 08:28:56.523156 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:28:56 crc kubenswrapper[4941]: E1130 08:28:56.524458 
4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:29:10 crc kubenswrapper[4941]: I1130 08:29:10.522421 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:29:10 crc kubenswrapper[4941]: E1130 08:29:10.523956 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:29:21 crc kubenswrapper[4941]: I1130 08:29:21.521919 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:29:21 crc kubenswrapper[4941]: E1130 08:29:21.523210 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:29:36 crc kubenswrapper[4941]: I1130 08:29:36.522490 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:29:36 crc kubenswrapper[4941]: E1130 08:29:36.523753 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:29:47 crc kubenswrapper[4941]: I1130 08:29:47.523584 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:29:47 crc kubenswrapper[4941]: E1130 08:29:47.525166 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.154387 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl"] Nov 30 08:30:00 crc kubenswrapper[4941]: E1130 08:30:00.155981 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff48638b-afaf-4d79-857f-ea8509d632da" containerName="extract-content" Nov 30 08:30:00 crc kubenswrapper[4941]: 
I1130 08:30:00.156008 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff48638b-afaf-4d79-857f-ea8509d632da" containerName="extract-content" Nov 30 08:30:00 crc kubenswrapper[4941]: E1130 08:30:00.156073 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff48638b-afaf-4d79-857f-ea8509d632da" containerName="extract-utilities" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.156086 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff48638b-afaf-4d79-857f-ea8509d632da" containerName="extract-utilities" Nov 30 08:30:00 crc kubenswrapper[4941]: E1130 08:30:00.156111 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff48638b-afaf-4d79-857f-ea8509d632da" containerName="registry-server" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.156125 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff48638b-afaf-4d79-857f-ea8509d632da" containerName="registry-server" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.156589 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff48638b-afaf-4d79-857f-ea8509d632da" containerName="registry-server" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.157983 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.160544 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.162090 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-889nc\" (UniqueName: \"kubernetes.io/projected/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-kube-api-access-889nc\") pod \"collect-profiles-29408190-zcdcl\" (UID: \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.162296 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-secret-volume\") pod \"collect-profiles-29408190-zcdcl\" (UID: \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.162335 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-config-volume\") pod \"collect-profiles-29408190-zcdcl\" (UID: \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.162404 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.167857 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl"] Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.265716 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-secret-volume\") pod 
\"collect-profiles-29408190-zcdcl\" (UID: \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.265801 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-config-volume\") pod \"collect-profiles-29408190-zcdcl\" (UID: \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.265981 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-889nc\" (UniqueName: \"kubernetes.io/projected/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-kube-api-access-889nc\") pod \"collect-profiles-29408190-zcdcl\" (UID: \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.266836 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-config-volume\") pod \"collect-profiles-29408190-zcdcl\" (UID: \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.276925 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-secret-volume\") pod \"collect-profiles-29408190-zcdcl\" (UID: \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.283920 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-889nc\" (UniqueName: \"kubernetes.io/projected/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-kube-api-access-889nc\") pod \"collect-profiles-29408190-zcdcl\" (UID: \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.492881 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.522421 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:30:00 crc kubenswrapper[4941]: E1130 08:30:00.522663 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:30:00 crc kubenswrapper[4941]: I1130 08:30:00.994162 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl"] Nov 30 08:30:00 crc kubenswrapper[4941]: W1130 08:30:00.995628 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e4aa5e4_86a4_4e41_abbe_8eeac5f5597d.slice/crio-f251341f85a6562241937e53de2f74718b2679f5dc1e396e12fca6454f03e682 WatchSource:0}: Error finding container f251341f85a6562241937e53de2f74718b2679f5dc1e396e12fca6454f03e682: Status 404 returned error can't find the container with id f251341f85a6562241937e53de2f74718b2679f5dc1e396e12fca6454f03e682 Nov 30 08:30:01 crc kubenswrapper[4941]: I1130 08:30:01.550772 4941 generic.go:334] "Generic (PLEG): container finished" podID="7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d" containerID="b28fd70c1644afe1ec19026a74f9edf9da3e058501a6e272f7813788fe1fb56f" exitCode=0 Nov 30 08:30:01 crc kubenswrapper[4941]: I1130 08:30:01.550871 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" event={"ID":"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d","Type":"ContainerDied","Data":"b28fd70c1644afe1ec19026a74f9edf9da3e058501a6e272f7813788fe1fb56f"} Nov 30 08:30:01 crc kubenswrapper[4941]: I1130 08:30:01.551119 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" event={"ID":"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d","Type":"ContainerStarted","Data":"f251341f85a6562241937e53de2f74718b2679f5dc1e396e12fca6454f03e682"} Nov 30 08:30:02 crc kubenswrapper[4941]: I1130 08:30:02.954495 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:03 crc kubenswrapper[4941]: I1130 08:30:03.123147 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-889nc\" (UniqueName: \"kubernetes.io/projected/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-kube-api-access-889nc\") pod \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\" (UID: \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\") " Nov 30 08:30:03 crc kubenswrapper[4941]: I1130 08:30:03.123398 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-secret-volume\") pod \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\" (UID: \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\") " Nov 30 08:30:03 crc kubenswrapper[4941]: I1130 08:30:03.123594 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-config-volume\") pod \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\" (UID: \"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d\") " Nov 30 08:30:03 crc kubenswrapper[4941]: I1130 08:30:03.124575 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-config-volume" (OuterVolumeSpecName: "config-volume") pod "7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d" (UID: "7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:30:03 crc kubenswrapper[4941]: I1130 08:30:03.130107 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d" (UID: "7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:30:03 crc kubenswrapper[4941]: I1130 08:30:03.131459 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-kube-api-access-889nc" (OuterVolumeSpecName: "kube-api-access-889nc") pod "7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d" (UID: "7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d"). InnerVolumeSpecName "kube-api-access-889nc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:30:03 crc kubenswrapper[4941]: I1130 08:30:03.227055 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-config-volume\") on node \"crc\" DevicePath \"\"" Nov 30 08:30:03 crc kubenswrapper[4941]: I1130 08:30:03.227196 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-889nc\" (UniqueName: \"kubernetes.io/projected/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-kube-api-access-889nc\") on node \"crc\" DevicePath \"\"" Nov 30 08:30:03 crc kubenswrapper[4941]: I1130 08:30:03.227214 4941 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 30 08:30:03 crc kubenswrapper[4941]: I1130 08:30:03.609821 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" event={"ID":"7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d","Type":"ContainerDied","Data":"f251341f85a6562241937e53de2f74718b2679f5dc1e396e12fca6454f03e682"} Nov 30 08:30:03 crc kubenswrapper[4941]: I1130 08:30:03.610252 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f251341f85a6562241937e53de2f74718b2679f5dc1e396e12fca6454f03e682" Nov 30 08:30:03 crc kubenswrapper[4941]: I1130 08:30:03.610646 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl" Nov 30 08:30:04 crc kubenswrapper[4941]: I1130 08:30:04.048249 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"] Nov 30 08:30:04 crc kubenswrapper[4941]: I1130 08:30:04.059944 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408145-7q47p"] Nov 30 08:30:04 crc kubenswrapper[4941]: I1130 08:30:04.068722 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-jzlcr"] Nov 30 08:30:04 crc kubenswrapper[4941]: I1130 08:30:04.076521 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-8a71-account-create-update-9kc9v"] Nov 30 08:30:04 crc kubenswrapper[4941]: I1130 08:30:04.084235 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-8a71-account-create-update-9kc9v"] Nov 30 08:30:04 crc kubenswrapper[4941]: I1130 08:30:04.092206 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-jzlcr"] Nov 30 08:30:05 crc kubenswrapper[4941]: I1130 08:30:05.538664 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d" path="/var/lib/kubelet/pods/06a87cd6-ff47-48ed-b9cb-46bf8ce7d86d/volumes" Nov 30 08:30:05 crc kubenswrapper[4941]: I1130 08:30:05.539957 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29fb1557-dd9b-447b-aaa4-6bd9f2daf116" path="/var/lib/kubelet/pods/29fb1557-dd9b-447b-aaa4-6bd9f2daf116/volumes" Nov 30 08:30:05 crc kubenswrapper[4941]: I1130 08:30:05.540999 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c3a7561-0dd1-4f36-bd83-19ed4c763c29" path="/var/lib/kubelet/pods/5c3a7561-0dd1-4f36-bd83-19ed4c763c29/volumes" Nov 30 08:30:15 crc kubenswrapper[4941]: I1130 08:30:15.046019 4941 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/aodh-db-sync-s95pb"] Nov 30 08:30:15 crc kubenswrapper[4941]: I1130 08:30:15.058036 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-s95pb"] Nov 30 08:30:15 crc kubenswrapper[4941]: I1130 08:30:15.523467 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:30:15 crc kubenswrapper[4941]: E1130 08:30:15.524271 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:30:15 crc kubenswrapper[4941]: I1130 08:30:15.537101 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="398af6dc-0391-4164-9eab-d83d0451986b" path="/var/lib/kubelet/pods/398af6dc-0391-4164-9eab-d83d0451986b/volumes" Nov 30 08:30:28 crc kubenswrapper[4941]: I1130 08:30:28.522947 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:30:28 crc kubenswrapper[4941]: E1130 08:30:28.526161 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:30:34 crc kubenswrapper[4941]: I1130 08:30:34.109589 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-2dsp7"] Nov 30 08:30:34 crc kubenswrapper[4941]: I1130 08:30:34.149411 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-2dsp7"] Nov 30 08:30:35 crc kubenswrapper[4941]: I1130 08:30:35.039407 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-9c47-account-create-update-tfxnb"] Nov 30 08:30:35 crc kubenswrapper[4941]: I1130 08:30:35.047899 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-9c47-account-create-update-tfxnb"] Nov 30 08:30:35 crc kubenswrapper[4941]: I1130 08:30:35.535607 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bc4835a-9b82-4dcd-940d-24485fbb131c" path="/var/lib/kubelet/pods/2bc4835a-9b82-4dcd-940d-24485fbb131c/volumes" Nov 30 08:30:35 crc kubenswrapper[4941]: I1130 08:30:35.536824 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0b24026-5088-4a66-ad1c-5088d4611867" path="/var/lib/kubelet/pods/e0b24026-5088-4a66-ad1c-5088d4611867/volumes" Nov 30 08:30:42 crc kubenswrapper[4941]: I1130 08:30:42.523822 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:30:42 crc kubenswrapper[4941]: E1130 08:30:42.525024 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:30:46 crc kubenswrapper[4941]: I1130 08:30:46.041398 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-d9gzb"] Nov 30 08:30:46 crc kubenswrapper[4941]: I1130 08:30:46.056843 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-d9gzb"] Nov 30 08:30:47 crc kubenswrapper[4941]: I1130 08:30:47.537780 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7294eefc-5174-4c48-8e7f-c52377ded802" path="/var/lib/kubelet/pods/7294eefc-5174-4c48-8e7f-c52377ded802/volumes" Nov 30 08:30:51 crc kubenswrapper[4941]: I1130 08:30:51.607000 4941 scope.go:117] "RemoveContainer" containerID="e5a1913aeb7fccf12c6b2321ab6c8807beb3e3838d3bd13b1d979b8bc1dc10a8" Nov 30 08:30:51 crc kubenswrapper[4941]: I1130 08:30:51.653121 4941 scope.go:117] "RemoveContainer" containerID="bfa19e8ea86798874c8622baeda35e85aa66b48ffe384e407e17994844690749" Nov 30 08:30:51 crc kubenswrapper[4941]: I1130 08:30:51.702072 4941 scope.go:117] "RemoveContainer" containerID="3663f4c279b80c742c41b0cfb68ccad6d01b263ef6bef7e4f2a0ce5724e252f2" Nov 30 08:30:51 crc kubenswrapper[4941]: I1130 08:30:51.758872 4941 scope.go:117] "RemoveContainer" containerID="2dbadf78e233efbb05b40fe18411f4c678a011e4fa77c655a1a4e1e34fd5f8c6" Nov 30 08:30:51 crc kubenswrapper[4941]: I1130 08:30:51.797197 4941 scope.go:117] "RemoveContainer" containerID="69f99024a88e4abaa11c982d5a717e4d5d04a4de350d15593d18ffa25cdcd2ea" Nov 30 08:30:51 crc kubenswrapper[4941]: I1130 08:30:51.831305 4941 scope.go:117] "RemoveContainer" containerID="d24c30dd7f06fee25e56f62176ace5efc6467dd5c45ff75bef404aeadc35cb80" Nov 30 08:30:51 crc kubenswrapper[4941]: I1130 08:30:51.874596 4941 scope.go:117] "RemoveContainer" containerID="57f37f5a5f1b34f839c76bb40cc8911dfe9aa9164c0fe40c4516474b6db36d8d" Nov 30 08:30:55 crc kubenswrapper[4941]: I1130 08:30:55.522836 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:30:55 crc kubenswrapper[4941]: E1130 08:30:55.524061 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:31:06 crc kubenswrapper[4941]: I1130 08:31:06.523097 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:31:06 crc kubenswrapper[4941]: E1130 08:31:06.525023 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.101913 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g569g"] Nov 30 08:31:17 crc kubenswrapper[4941]: E1130 08:31:17.103250 4941 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d" containerName="collect-profiles" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.103268 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d" containerName="collect-profiles" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.103578 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d" containerName="collect-profiles" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.105963 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.115103 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g569g"] Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.137418 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47e25469-bcbf-4b86-b00a-eddaad1690a7-utilities\") pod \"community-operators-g569g\" (UID: \"47e25469-bcbf-4b86-b00a-eddaad1690a7\") " pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.137620 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsdkl\" (UniqueName: \"kubernetes.io/projected/47e25469-bcbf-4b86-b00a-eddaad1690a7-kube-api-access-wsdkl\") pod \"community-operators-g569g\" (UID: \"47e25469-bcbf-4b86-b00a-eddaad1690a7\") " pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.137740 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47e25469-bcbf-4b86-b00a-eddaad1690a7-catalog-content\") pod \"community-operators-g569g\" (UID: \"47e25469-bcbf-4b86-b00a-eddaad1690a7\") " pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.239660 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47e25469-bcbf-4b86-b00a-eddaad1690a7-catalog-content\") pod \"community-operators-g569g\" (UID: \"47e25469-bcbf-4b86-b00a-eddaad1690a7\") " pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.239771 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47e25469-bcbf-4b86-b00a-eddaad1690a7-utilities\") pod \"community-operators-g569g\" (UID: \"47e25469-bcbf-4b86-b00a-eddaad1690a7\") " pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.239888 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsdkl\" (UniqueName: \"kubernetes.io/projected/47e25469-bcbf-4b86-b00a-eddaad1690a7-kube-api-access-wsdkl\") pod \"community-operators-g569g\" (UID: \"47e25469-bcbf-4b86-b00a-eddaad1690a7\") " pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.240645 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47e25469-bcbf-4b86-b00a-eddaad1690a7-catalog-content\") 
pod \"community-operators-g569g\" (UID: \"47e25469-bcbf-4b86-b00a-eddaad1690a7\") " pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.240814 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47e25469-bcbf-4b86-b00a-eddaad1690a7-utilities\") pod \"community-operators-g569g\" (UID: \"47e25469-bcbf-4b86-b00a-eddaad1690a7\") " pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.266803 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsdkl\" (UniqueName: \"kubernetes.io/projected/47e25469-bcbf-4b86-b00a-eddaad1690a7-kube-api-access-wsdkl\") pod \"community-operators-g569g\" (UID: \"47e25469-bcbf-4b86-b00a-eddaad1690a7\") " pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.455714 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:17 crc kubenswrapper[4941]: I1130 08:31:17.522405 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:31:17 crc kubenswrapper[4941]: E1130 08:31:17.522990 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:31:18 crc kubenswrapper[4941]: I1130 08:31:18.020742 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g569g"] Nov 30 08:31:18 crc kubenswrapper[4941]: I1130 08:31:18.538861 4941 generic.go:334] "Generic (PLEG): container finished" podID="47e25469-bcbf-4b86-b00a-eddaad1690a7" containerID="c36fe3a900172ecb4b9968e11ad2e65d49a1cbf77ca11aa5e5f2465c4c8ad109" exitCode=0 Nov 30 08:31:18 crc kubenswrapper[4941]: I1130 08:31:18.538964 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g569g" event={"ID":"47e25469-bcbf-4b86-b00a-eddaad1690a7","Type":"ContainerDied","Data":"c36fe3a900172ecb4b9968e11ad2e65d49a1cbf77ca11aa5e5f2465c4c8ad109"} Nov 30 08:31:18 crc kubenswrapper[4941]: I1130 08:31:18.539300 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g569g" event={"ID":"47e25469-bcbf-4b86-b00a-eddaad1690a7","Type":"ContainerStarted","Data":"458006ce00ef126b0c4ce7e8ed4e73e97bebe435425bd09651a6be5863921f51"} Nov 30 08:31:20 crc kubenswrapper[4941]: I1130 08:31:20.562673 4941 generic.go:334] "Generic (PLEG): container finished" podID="47e25469-bcbf-4b86-b00a-eddaad1690a7" containerID="31a7cffd4bf18565a0e703e257c4188a207848ba44a4145672ef8f1376f5befb" exitCode=0 Nov 30 08:31:20 crc kubenswrapper[4941]: I1130 08:31:20.562795 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g569g" event={"ID":"47e25469-bcbf-4b86-b00a-eddaad1690a7","Type":"ContainerDied","Data":"31a7cffd4bf18565a0e703e257c4188a207848ba44a4145672ef8f1376f5befb"} Nov 30 08:31:21 crc kubenswrapper[4941]: I1130 08:31:21.578891 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-g569g" event={"ID":"47e25469-bcbf-4b86-b00a-eddaad1690a7","Type":"ContainerStarted","Data":"4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e"} Nov 30 08:31:21 crc kubenswrapper[4941]: I1130 08:31:21.616020 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g569g" podStartSLOduration=2.153903565 podStartE2EDuration="4.615995979s" podCreationTimestamp="2025-11-30 08:31:17 +0000 UTC" firstStartedPulling="2025-11-30 08:31:18.541412322 +0000 UTC m=+6299.309583931" lastFinishedPulling="2025-11-30 08:31:21.003504726 +0000 UTC m=+6301.771676345" observedRunningTime="2025-11-30 08:31:21.608004782 +0000 UTC m=+6302.376176391" watchObservedRunningTime="2025-11-30 08:31:21.615995979 +0000 UTC m=+6302.384167598" Nov 30 08:31:27 crc kubenswrapper[4941]: I1130 08:31:27.457149 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:27 crc kubenswrapper[4941]: I1130 08:31:27.457741 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:27 crc kubenswrapper[4941]: I1130 08:31:27.538828 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:27 crc kubenswrapper[4941]: I1130 08:31:27.742584 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:27 crc kubenswrapper[4941]: I1130 08:31:27.814343 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g569g"] Nov 30 08:31:29 crc kubenswrapper[4941]: I1130 08:31:29.531444 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:31:29 crc kubenswrapper[4941]: E1130 08:31:29.532947 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:31:29 crc kubenswrapper[4941]: I1130 08:31:29.704833 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g569g" podUID="47e25469-bcbf-4b86-b00a-eddaad1690a7" containerName="registry-server" containerID="cri-o://4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e" gracePeriod=2 Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.429123 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.607788 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsdkl\" (UniqueName: \"kubernetes.io/projected/47e25469-bcbf-4b86-b00a-eddaad1690a7-kube-api-access-wsdkl\") pod \"47e25469-bcbf-4b86-b00a-eddaad1690a7\" (UID: \"47e25469-bcbf-4b86-b00a-eddaad1690a7\") " Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.607847 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47e25469-bcbf-4b86-b00a-eddaad1690a7-utilities\") pod \"47e25469-bcbf-4b86-b00a-eddaad1690a7\" (UID: \"47e25469-bcbf-4b86-b00a-eddaad1690a7\") " Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.607987 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47e25469-bcbf-4b86-b00a-eddaad1690a7-catalog-content\") pod \"47e25469-bcbf-4b86-b00a-eddaad1690a7\" (UID: \"47e25469-bcbf-4b86-b00a-eddaad1690a7\") " Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.609885 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47e25469-bcbf-4b86-b00a-eddaad1690a7-utilities" (OuterVolumeSpecName: "utilities") pod "47e25469-bcbf-4b86-b00a-eddaad1690a7" (UID: "47e25469-bcbf-4b86-b00a-eddaad1690a7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.633551 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47e25469-bcbf-4b86-b00a-eddaad1690a7-kube-api-access-wsdkl" (OuterVolumeSpecName: "kube-api-access-wsdkl") pod "47e25469-bcbf-4b86-b00a-eddaad1690a7" (UID: "47e25469-bcbf-4b86-b00a-eddaad1690a7"). InnerVolumeSpecName "kube-api-access-wsdkl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.671847 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47e25469-bcbf-4b86-b00a-eddaad1690a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47e25469-bcbf-4b86-b00a-eddaad1690a7" (UID: "47e25469-bcbf-4b86-b00a-eddaad1690a7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.710505 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsdkl\" (UniqueName: \"kubernetes.io/projected/47e25469-bcbf-4b86-b00a-eddaad1690a7-kube-api-access-wsdkl\") on node \"crc\" DevicePath \"\"" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.710543 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47e25469-bcbf-4b86-b00a-eddaad1690a7-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.710553 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47e25469-bcbf-4b86-b00a-eddaad1690a7-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.715917 4941 generic.go:334] "Generic (PLEG): container finished" podID="47e25469-bcbf-4b86-b00a-eddaad1690a7" containerID="4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e" exitCode=0 Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.716088 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g569g" event={"ID":"47e25469-bcbf-4b86-b00a-eddaad1690a7","Type":"ContainerDied","Data":"4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e"} Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.716199 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g569g" event={"ID":"47e25469-bcbf-4b86-b00a-eddaad1690a7","Type":"ContainerDied","Data":"458006ce00ef126b0c4ce7e8ed4e73e97bebe435425bd09651a6be5863921f51"} Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.716295 4941 scope.go:117] "RemoveContainer" containerID="4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.716620 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g569g" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.742690 4941 scope.go:117] "RemoveContainer" containerID="31a7cffd4bf18565a0e703e257c4188a207848ba44a4145672ef8f1376f5befb" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.769185 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g569g"] Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.779177 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g569g"] Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.783115 4941 scope.go:117] "RemoveContainer" containerID="c36fe3a900172ecb4b9968e11ad2e65d49a1cbf77ca11aa5e5f2465c4c8ad109" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.819317 4941 scope.go:117] "RemoveContainer" containerID="4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e" Nov 30 08:31:30 crc kubenswrapper[4941]: E1130 08:31:30.819920 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e\": container with ID starting with 4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e not found: ID does not exist" containerID="4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.819971 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e"} err="failed to get container status \"4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e\": rpc error: code = NotFound desc = could not find container \"4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e\": container with ID starting with 4b79ef98f4adf331df2ede0b3ca46784e0092978a824a4ef1c16b4f39661d58e not found: ID does not exist" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.820007 4941 scope.go:117] "RemoveContainer" containerID="31a7cffd4bf18565a0e703e257c4188a207848ba44a4145672ef8f1376f5befb" Nov 30 08:31:30 crc kubenswrapper[4941]: E1130 08:31:30.820415 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31a7cffd4bf18565a0e703e257c4188a207848ba44a4145672ef8f1376f5befb\": container with ID starting with 31a7cffd4bf18565a0e703e257c4188a207848ba44a4145672ef8f1376f5befb not found: ID does not exist" containerID="31a7cffd4bf18565a0e703e257c4188a207848ba44a4145672ef8f1376f5befb" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.820470 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31a7cffd4bf18565a0e703e257c4188a207848ba44a4145672ef8f1376f5befb"} err="failed to get container status \"31a7cffd4bf18565a0e703e257c4188a207848ba44a4145672ef8f1376f5befb\": rpc error: code = NotFound desc = could not find container \"31a7cffd4bf18565a0e703e257c4188a207848ba44a4145672ef8f1376f5befb\": container with ID starting with 31a7cffd4bf18565a0e703e257c4188a207848ba44a4145672ef8f1376f5befb not found: ID does not exist" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.820511 4941 scope.go:117] "RemoveContainer" containerID="c36fe3a900172ecb4b9968e11ad2e65d49a1cbf77ca11aa5e5f2465c4c8ad109" Nov 30 08:31:30 crc kubenswrapper[4941]: E1130 08:31:30.820795 4941 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c36fe3a900172ecb4b9968e11ad2e65d49a1cbf77ca11aa5e5f2465c4c8ad109\": container with ID starting with c36fe3a900172ecb4b9968e11ad2e65d49a1cbf77ca11aa5e5f2465c4c8ad109 not found: ID does not exist" containerID="c36fe3a900172ecb4b9968e11ad2e65d49a1cbf77ca11aa5e5f2465c4c8ad109" Nov 30 08:31:30 crc kubenswrapper[4941]: I1130 08:31:30.820820 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c36fe3a900172ecb4b9968e11ad2e65d49a1cbf77ca11aa5e5f2465c4c8ad109"} err="failed to get container status \"c36fe3a900172ecb4b9968e11ad2e65d49a1cbf77ca11aa5e5f2465c4c8ad109\": rpc error: code = NotFound desc = could not find container \"c36fe3a900172ecb4b9968e11ad2e65d49a1cbf77ca11aa5e5f2465c4c8ad109\": container with ID starting with c36fe3a900172ecb4b9968e11ad2e65d49a1cbf77ca11aa5e5f2465c4c8ad109 not found: ID does not exist" Nov 30 08:31:31 crc kubenswrapper[4941]: I1130 08:31:31.537041 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47e25469-bcbf-4b86-b00a-eddaad1690a7" path="/var/lib/kubelet/pods/47e25469-bcbf-4b86-b00a-eddaad1690a7/volumes" Nov 30 08:31:40 crc kubenswrapper[4941]: I1130 08:31:40.521436 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:31:40 crc kubenswrapper[4941]: I1130 08:31:40.830469 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"24bc20ae8ebb5e63f354521469f692ee4d9f3f6a51f811a3e50661d6460945e1"} Nov 30 08:32:57 crc kubenswrapper[4941]: I1130 08:32:57.776776 4941 generic.go:334] "Generic (PLEG): container finished" podID="941a2803-c15d-4bd4-8348-e4cb3fd11d55" containerID="9afc48fb264f584aac1105144201b52f38b9cdd4abc4cd4be9a712904820c328" exitCode=0 Nov 30 08:32:57 crc kubenswrapper[4941]: I1130 08:32:57.776857 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc" event={"ID":"941a2803-c15d-4bd4-8348-e4cb3fd11d55","Type":"ContainerDied","Data":"9afc48fb264f584aac1105144201b52f38b9cdd4abc4cd4be9a712904820c328"} Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.267233 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc" Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.436071 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-tripleo-cleanup-combined-ca-bundle\") pod \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.436160 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-ssh-key\") pod \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.436248 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9gqs\" (UniqueName: \"kubernetes.io/projected/941a2803-c15d-4bd4-8348-e4cb3fd11d55-kube-api-access-c9gqs\") pod \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.436276 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-ceph\") pod \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.436478 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-inventory\") pod \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\" (UID: \"941a2803-c15d-4bd4-8348-e4cb3fd11d55\") " Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.443232 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/941a2803-c15d-4bd4-8348-e4cb3fd11d55-kube-api-access-c9gqs" (OuterVolumeSpecName: "kube-api-access-c9gqs") pod "941a2803-c15d-4bd4-8348-e4cb3fd11d55" (UID: "941a2803-c15d-4bd4-8348-e4cb3fd11d55"). InnerVolumeSpecName "kube-api-access-c9gqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.444968 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "941a2803-c15d-4bd4-8348-e4cb3fd11d55" (UID: "941a2803-c15d-4bd4-8348-e4cb3fd11d55"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.446368 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-ceph" (OuterVolumeSpecName: "ceph") pod "941a2803-c15d-4bd4-8348-e4cb3fd11d55" (UID: "941a2803-c15d-4bd4-8348-e4cb3fd11d55"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.481586 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-inventory" (OuterVolumeSpecName: "inventory") pod "941a2803-c15d-4bd4-8348-e4cb3fd11d55" (UID: "941a2803-c15d-4bd4-8348-e4cb3fd11d55"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.482588 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "941a2803-c15d-4bd4-8348-e4cb3fd11d55" (UID: "941a2803-c15d-4bd4-8348-e4cb3fd11d55"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.538683 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.538715 4941 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.538729 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.538741 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9gqs\" (UniqueName: \"kubernetes.io/projected/941a2803-c15d-4bd4-8348-e4cb3fd11d55-kube-api-access-c9gqs\") on node \"crc\" DevicePath \"\"" Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.538752 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/941a2803-c15d-4bd4-8348-e4cb3fd11d55-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.801595 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc" event={"ID":"941a2803-c15d-4bd4-8348-e4cb3fd11d55","Type":"ContainerDied","Data":"15ee482079d4e9387cd63158d8ac13889500f0de17bce2d9e23054c2f7e151c9"} Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.801645 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15ee482079d4e9387cd63158d8ac13889500f0de17bce2d9e23054c2f7e151c9" Nov 30 08:32:59 crc kubenswrapper[4941]: I1130 08:32:59.801714 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.959257 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-qsd9n"] Nov 30 08:33:05 crc kubenswrapper[4941]: E1130 08:33:05.960873 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47e25469-bcbf-4b86-b00a-eddaad1690a7" containerName="registry-server" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.960893 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="47e25469-bcbf-4b86-b00a-eddaad1690a7" containerName="registry-server" Nov 30 08:33:05 crc kubenswrapper[4941]: E1130 08:33:05.960916 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47e25469-bcbf-4b86-b00a-eddaad1690a7" containerName="extract-content" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.960926 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="47e25469-bcbf-4b86-b00a-eddaad1690a7" containerName="extract-content" Nov 30 08:33:05 crc kubenswrapper[4941]: E1130 08:33:05.960968 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47e25469-bcbf-4b86-b00a-eddaad1690a7" containerName="extract-utilities" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.960979 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="47e25469-bcbf-4b86-b00a-eddaad1690a7" containerName="extract-utilities" Nov 30 08:33:05 crc kubenswrapper[4941]: E1130 08:33:05.961022 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="941a2803-c15d-4bd4-8348-e4cb3fd11d55" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.961031 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="941a2803-c15d-4bd4-8348-e4cb3fd11d55" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.961300 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="941a2803-c15d-4bd4-8348-e4cb3fd11d55" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.961352 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="47e25469-bcbf-4b86-b00a-eddaad1690a7" containerName="registry-server" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.962517 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.967661 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.967961 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.968149 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.968393 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:33:05 crc kubenswrapper[4941]: I1130 08:33:05.973222 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-qsd9n"] Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.019057 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-ceph\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.019473 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvq64\" (UniqueName: \"kubernetes.io/projected/3215bae2-d62e-4687-bd5b-8d51625ba47c-kube-api-access-tvq64\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.019576 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.019713 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-inventory\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.019830 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.122675 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvq64\" (UniqueName: \"kubernetes.io/projected/3215bae2-d62e-4687-bd5b-8d51625ba47c-kube-api-access-tvq64\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 
08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.122752 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.122834 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-inventory\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.122930 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.123066 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-ceph\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.131209 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.131649 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-inventory\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.132314 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.140639 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-ceph\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.144276 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvq64\" (UniqueName: \"kubernetes.io/projected/3215bae2-d62e-4687-bd5b-8d51625ba47c-kube-api-access-tvq64\") pod \"bootstrap-openstack-openstack-cell1-qsd9n\" (UID: 
\"3215bae2-d62e-4687-bd5b-8d51625ba47c\") " pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.310362 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.912364 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-qsd9n"] Nov 30 08:33:06 crc kubenswrapper[4941]: I1130 08:33:06.925490 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 08:33:07 crc kubenswrapper[4941]: I1130 08:33:07.899879 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" event={"ID":"3215bae2-d62e-4687-bd5b-8d51625ba47c","Type":"ContainerStarted","Data":"df5910ee05d05c208eee6eeb7fee0fcb93c27b9374970d75f0ab5a0c8f7f00b8"} Nov 30 08:33:07 crc kubenswrapper[4941]: I1130 08:33:07.902381 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" event={"ID":"3215bae2-d62e-4687-bd5b-8d51625ba47c","Type":"ContainerStarted","Data":"f84315bfea89cd0cea9a5d42a6e1558c04d1e8e0a9ef27587384374781be2aca"} Nov 30 08:33:07 crc kubenswrapper[4941]: I1130 08:33:07.928490 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" podStartSLOduration=2.397704785 podStartE2EDuration="2.928469952s" podCreationTimestamp="2025-11-30 08:33:05 +0000 UTC" firstStartedPulling="2025-11-30 08:33:06.925052425 +0000 UTC m=+6407.693224054" lastFinishedPulling="2025-11-30 08:33:07.455817562 +0000 UTC m=+6408.223989221" observedRunningTime="2025-11-30 08:33:07.925105168 +0000 UTC m=+6408.693276787" watchObservedRunningTime="2025-11-30 08:33:07.928469952 +0000 UTC m=+6408.696641561" Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.048350 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rljgn"] Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.052984 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.095165 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwjnl\" (UniqueName: \"kubernetes.io/projected/fcbe6407-b753-4a63-abd4-48088946bd51-kube-api-access-bwjnl\") pod \"certified-operators-rljgn\" (UID: \"fcbe6407-b753-4a63-abd4-48088946bd51\") " pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.095400 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcbe6407-b753-4a63-abd4-48088946bd51-catalog-content\") pod \"certified-operators-rljgn\" (UID: \"fcbe6407-b753-4a63-abd4-48088946bd51\") " pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.095534 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcbe6407-b753-4a63-abd4-48088946bd51-utilities\") pod \"certified-operators-rljgn\" (UID: \"fcbe6407-b753-4a63-abd4-48088946bd51\") " pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.100249 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rljgn"] Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.198474 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcbe6407-b753-4a63-abd4-48088946bd51-catalog-content\") pod \"certified-operators-rljgn\" (UID: \"fcbe6407-b753-4a63-abd4-48088946bd51\") " pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.198579 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcbe6407-b753-4a63-abd4-48088946bd51-utilities\") pod \"certified-operators-rljgn\" (UID: \"fcbe6407-b753-4a63-abd4-48088946bd51\") " pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.198697 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwjnl\" (UniqueName: \"kubernetes.io/projected/fcbe6407-b753-4a63-abd4-48088946bd51-kube-api-access-bwjnl\") pod \"certified-operators-rljgn\" (UID: \"fcbe6407-b753-4a63-abd4-48088946bd51\") " pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.199034 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcbe6407-b753-4a63-abd4-48088946bd51-catalog-content\") pod \"certified-operators-rljgn\" (UID: \"fcbe6407-b753-4a63-abd4-48088946bd51\") " pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.199108 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcbe6407-b753-4a63-abd4-48088946bd51-utilities\") pod \"certified-operators-rljgn\" (UID: \"fcbe6407-b753-4a63-abd4-48088946bd51\") " pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.224911 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bwjnl\" (UniqueName: \"kubernetes.io/projected/fcbe6407-b753-4a63-abd4-48088946bd51-kube-api-access-bwjnl\") pod \"certified-operators-rljgn\" (UID: \"fcbe6407-b753-4a63-abd4-48088946bd51\") " pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.440989 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:33:57 crc kubenswrapper[4941]: I1130 08:33:57.964634 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rljgn"] Nov 30 08:33:58 crc kubenswrapper[4941]: I1130 08:33:58.533250 4941 generic.go:334] "Generic (PLEG): container finished" podID="fcbe6407-b753-4a63-abd4-48088946bd51" containerID="bb4079da1bb5a279246d422af098915c2cb5666c2f79ceed9b59f7ed2a0e409e" exitCode=0 Nov 30 08:33:58 crc kubenswrapper[4941]: I1130 08:33:58.533319 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rljgn" event={"ID":"fcbe6407-b753-4a63-abd4-48088946bd51","Type":"ContainerDied","Data":"bb4079da1bb5a279246d422af098915c2cb5666c2f79ceed9b59f7ed2a0e409e"} Nov 30 08:33:58 crc kubenswrapper[4941]: I1130 08:33:58.533636 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rljgn" event={"ID":"fcbe6407-b753-4a63-abd4-48088946bd51","Type":"ContainerStarted","Data":"3a91a1599cb1f07894f7191fc70ba59d0305cf6d3e48f6b80d183d472a1885d4"} Nov 30 08:33:59 crc kubenswrapper[4941]: I1130 08:33:59.542855 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rljgn" event={"ID":"fcbe6407-b753-4a63-abd4-48088946bd51","Type":"ContainerStarted","Data":"3ce4bce49db03c8aec5bb39a2d5ad2721f75051d651b46aa0ca511c4184eed58"} Nov 30 08:34:00 crc kubenswrapper[4941]: I1130 08:34:00.554620 4941 generic.go:334] "Generic (PLEG): container finished" podID="fcbe6407-b753-4a63-abd4-48088946bd51" containerID="3ce4bce49db03c8aec5bb39a2d5ad2721f75051d651b46aa0ca511c4184eed58" exitCode=0 Nov 30 08:34:00 crc kubenswrapper[4941]: I1130 08:34:00.554736 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rljgn" event={"ID":"fcbe6407-b753-4a63-abd4-48088946bd51","Type":"ContainerDied","Data":"3ce4bce49db03c8aec5bb39a2d5ad2721f75051d651b46aa0ca511c4184eed58"} Nov 30 08:34:01 crc kubenswrapper[4941]: I1130 08:34:01.567718 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rljgn" event={"ID":"fcbe6407-b753-4a63-abd4-48088946bd51","Type":"ContainerStarted","Data":"b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293"} Nov 30 08:34:01 crc kubenswrapper[4941]: I1130 08:34:01.590702 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rljgn" podStartSLOduration=2.092841501 podStartE2EDuration="4.590670941s" podCreationTimestamp="2025-11-30 08:33:57 +0000 UTC" firstStartedPulling="2025-11-30 08:33:58.535428531 +0000 UTC m=+6459.303600140" lastFinishedPulling="2025-11-30 08:34:01.033257961 +0000 UTC m=+6461.801429580" observedRunningTime="2025-11-30 08:34:01.583986914 +0000 UTC m=+6462.352158523" watchObservedRunningTime="2025-11-30 08:34:01.590670941 +0000 UTC m=+6462.358842580" Nov 30 08:34:02 crc kubenswrapper[4941]: I1130 08:34:02.979448 4941 patch_prober.go:28] interesting 
pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:34:02 crc kubenswrapper[4941]: I1130 08:34:02.979541 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:34:07 crc kubenswrapper[4941]: I1130 08:34:07.441172 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:34:07 crc kubenswrapper[4941]: I1130 08:34:07.443749 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:34:07 crc kubenswrapper[4941]: I1130 08:34:07.498239 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:34:07 crc kubenswrapper[4941]: I1130 08:34:07.689320 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:34:07 crc kubenswrapper[4941]: I1130 08:34:07.756138 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rljgn"] Nov 30 08:34:09 crc kubenswrapper[4941]: I1130 08:34:09.660215 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rljgn" podUID="fcbe6407-b753-4a63-abd4-48088946bd51" containerName="registry-server" containerID="cri-o://b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293" gracePeriod=2 Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.195068 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.312565 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwjnl\" (UniqueName: \"kubernetes.io/projected/fcbe6407-b753-4a63-abd4-48088946bd51-kube-api-access-bwjnl\") pod \"fcbe6407-b753-4a63-abd4-48088946bd51\" (UID: \"fcbe6407-b753-4a63-abd4-48088946bd51\") " Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.312708 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcbe6407-b753-4a63-abd4-48088946bd51-utilities\") pod \"fcbe6407-b753-4a63-abd4-48088946bd51\" (UID: \"fcbe6407-b753-4a63-abd4-48088946bd51\") " Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.312884 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcbe6407-b753-4a63-abd4-48088946bd51-catalog-content\") pod \"fcbe6407-b753-4a63-abd4-48088946bd51\" (UID: \"fcbe6407-b753-4a63-abd4-48088946bd51\") " Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.314421 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcbe6407-b753-4a63-abd4-48088946bd51-utilities" (OuterVolumeSpecName: "utilities") pod "fcbe6407-b753-4a63-abd4-48088946bd51" (UID: "fcbe6407-b753-4a63-abd4-48088946bd51"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.321614 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcbe6407-b753-4a63-abd4-48088946bd51-kube-api-access-bwjnl" (OuterVolumeSpecName: "kube-api-access-bwjnl") pod "fcbe6407-b753-4a63-abd4-48088946bd51" (UID: "fcbe6407-b753-4a63-abd4-48088946bd51"). InnerVolumeSpecName "kube-api-access-bwjnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.371030 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcbe6407-b753-4a63-abd4-48088946bd51-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fcbe6407-b753-4a63-abd4-48088946bd51" (UID: "fcbe6407-b753-4a63-abd4-48088946bd51"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.415190 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwjnl\" (UniqueName: \"kubernetes.io/projected/fcbe6407-b753-4a63-abd4-48088946bd51-kube-api-access-bwjnl\") on node \"crc\" DevicePath \"\"" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.415234 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcbe6407-b753-4a63-abd4-48088946bd51-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.415247 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcbe6407-b753-4a63-abd4-48088946bd51-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.672100 4941 generic.go:334] "Generic (PLEG): container finished" podID="fcbe6407-b753-4a63-abd4-48088946bd51" containerID="b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293" exitCode=0 Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.672155 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rljgn" event={"ID":"fcbe6407-b753-4a63-abd4-48088946bd51","Type":"ContainerDied","Data":"b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293"} Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.672191 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rljgn" event={"ID":"fcbe6407-b753-4a63-abd4-48088946bd51","Type":"ContainerDied","Data":"3a91a1599cb1f07894f7191fc70ba59d0305cf6d3e48f6b80d183d472a1885d4"} Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.672224 4941 scope.go:117] "RemoveContainer" containerID="b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.672300 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rljgn" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.709048 4941 scope.go:117] "RemoveContainer" containerID="3ce4bce49db03c8aec5bb39a2d5ad2721f75051d651b46aa0ca511c4184eed58" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.715040 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rljgn"] Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.726152 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rljgn"] Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.746278 4941 scope.go:117] "RemoveContainer" containerID="bb4079da1bb5a279246d422af098915c2cb5666c2f79ceed9b59f7ed2a0e409e" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.791004 4941 scope.go:117] "RemoveContainer" containerID="b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293" Nov 30 08:34:10 crc kubenswrapper[4941]: E1130 08:34:10.791524 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293\": container with ID starting with b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293 not found: ID does not exist" containerID="b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.791578 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293"} err="failed to get container status \"b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293\": rpc error: code = NotFound desc = could not find container \"b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293\": container with ID starting with b0eaa6087a1062208f70f40501346d4a0c3146ec3d02ccb099e18f8b74ed4293 not found: ID does not exist" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.791610 4941 scope.go:117] "RemoveContainer" containerID="3ce4bce49db03c8aec5bb39a2d5ad2721f75051d651b46aa0ca511c4184eed58" Nov 30 08:34:10 crc kubenswrapper[4941]: E1130 08:34:10.792077 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ce4bce49db03c8aec5bb39a2d5ad2721f75051d651b46aa0ca511c4184eed58\": container with ID starting with 3ce4bce49db03c8aec5bb39a2d5ad2721f75051d651b46aa0ca511c4184eed58 not found: ID does not exist" containerID="3ce4bce49db03c8aec5bb39a2d5ad2721f75051d651b46aa0ca511c4184eed58" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.792132 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ce4bce49db03c8aec5bb39a2d5ad2721f75051d651b46aa0ca511c4184eed58"} err="failed to get container status \"3ce4bce49db03c8aec5bb39a2d5ad2721f75051d651b46aa0ca511c4184eed58\": rpc error: code = NotFound desc = could not find container \"3ce4bce49db03c8aec5bb39a2d5ad2721f75051d651b46aa0ca511c4184eed58\": container with ID starting with 3ce4bce49db03c8aec5bb39a2d5ad2721f75051d651b46aa0ca511c4184eed58 not found: ID does not exist" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.792165 4941 scope.go:117] "RemoveContainer" containerID="bb4079da1bb5a279246d422af098915c2cb5666c2f79ceed9b59f7ed2a0e409e" Nov 30 08:34:10 crc kubenswrapper[4941]: E1130 08:34:10.792688 4941 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"bb4079da1bb5a279246d422af098915c2cb5666c2f79ceed9b59f7ed2a0e409e\": container with ID starting with bb4079da1bb5a279246d422af098915c2cb5666c2f79ceed9b59f7ed2a0e409e not found: ID does not exist" containerID="bb4079da1bb5a279246d422af098915c2cb5666c2f79ceed9b59f7ed2a0e409e" Nov 30 08:34:10 crc kubenswrapper[4941]: I1130 08:34:10.792721 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb4079da1bb5a279246d422af098915c2cb5666c2f79ceed9b59f7ed2a0e409e"} err="failed to get container status \"bb4079da1bb5a279246d422af098915c2cb5666c2f79ceed9b59f7ed2a0e409e\": rpc error: code = NotFound desc = could not find container \"bb4079da1bb5a279246d422af098915c2cb5666c2f79ceed9b59f7ed2a0e409e\": container with ID starting with bb4079da1bb5a279246d422af098915c2cb5666c2f79ceed9b59f7ed2a0e409e not found: ID does not exist" Nov 30 08:34:11 crc kubenswrapper[4941]: I1130 08:34:11.536941 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcbe6407-b753-4a63-abd4-48088946bd51" path="/var/lib/kubelet/pods/fcbe6407-b753-4a63-abd4-48088946bd51/volumes" Nov 30 08:34:32 crc kubenswrapper[4941]: I1130 08:34:32.978917 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:34:32 crc kubenswrapper[4941]: I1130 08:34:32.980043 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:35:02 crc kubenswrapper[4941]: I1130 08:35:02.979317 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:35:02 crc kubenswrapper[4941]: I1130 08:35:02.980125 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:35:02 crc kubenswrapper[4941]: I1130 08:35:02.980197 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 08:35:02 crc kubenswrapper[4941]: I1130 08:35:02.981496 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"24bc20ae8ebb5e63f354521469f692ee4d9f3f6a51f811a3e50661d6460945e1"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 08:35:02 crc kubenswrapper[4941]: I1130 08:35:02.981576 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" 
podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://24bc20ae8ebb5e63f354521469f692ee4d9f3f6a51f811a3e50661d6460945e1" gracePeriod=600 Nov 30 08:35:03 crc kubenswrapper[4941]: I1130 08:35:03.357099 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="24bc20ae8ebb5e63f354521469f692ee4d9f3f6a51f811a3e50661d6460945e1" exitCode=0 Nov 30 08:35:03 crc kubenswrapper[4941]: I1130 08:35:03.357178 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"24bc20ae8ebb5e63f354521469f692ee4d9f3f6a51f811a3e50661d6460945e1"} Nov 30 08:35:03 crc kubenswrapper[4941]: I1130 08:35:03.357797 4941 scope.go:117] "RemoveContainer" containerID="88fc9ea9562cb51b3807ba918d1a5b12121cd6f06cd984dcd9a14c33a714b890" Nov 30 08:35:04 crc kubenswrapper[4941]: I1130 08:35:04.375281 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499"} Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.480283 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nbknp"] Nov 30 08:35:51 crc kubenswrapper[4941]: E1130 08:35:51.483234 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcbe6407-b753-4a63-abd4-48088946bd51" containerName="registry-server" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.483425 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcbe6407-b753-4a63-abd4-48088946bd51" containerName="registry-server" Nov 30 08:35:51 crc kubenswrapper[4941]: E1130 08:35:51.483563 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcbe6407-b753-4a63-abd4-48088946bd51" containerName="extract-content" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.483642 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcbe6407-b753-4a63-abd4-48088946bd51" containerName="extract-content" Nov 30 08:35:51 crc kubenswrapper[4941]: E1130 08:35:51.483740 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcbe6407-b753-4a63-abd4-48088946bd51" containerName="extract-utilities" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.483817 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcbe6407-b753-4a63-abd4-48088946bd51" containerName="extract-utilities" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.484217 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcbe6407-b753-4a63-abd4-48088946bd51" containerName="registry-server" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.486494 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nbknp" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.500047 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nbknp"] Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.531099 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-catalog-content\") pod \"redhat-operators-nbknp\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") " pod="openshift-marketplace/redhat-operators-nbknp" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.531155 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h96cn\" (UniqueName: \"kubernetes.io/projected/b029a865-4ea3-489c-86cc-efe895777abd-kube-api-access-h96cn\") pod \"redhat-operators-nbknp\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") " pod="openshift-marketplace/redhat-operators-nbknp" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.531224 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-utilities\") pod \"redhat-operators-nbknp\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") " pod="openshift-marketplace/redhat-operators-nbknp" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.633806 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-catalog-content\") pod \"redhat-operators-nbknp\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") " pod="openshift-marketplace/redhat-operators-nbknp" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.633884 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h96cn\" (UniqueName: \"kubernetes.io/projected/b029a865-4ea3-489c-86cc-efe895777abd-kube-api-access-h96cn\") pod \"redhat-operators-nbknp\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") " pod="openshift-marketplace/redhat-operators-nbknp" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.633955 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-utilities\") pod \"redhat-operators-nbknp\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") " pod="openshift-marketplace/redhat-operators-nbknp" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.634370 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-catalog-content\") pod \"redhat-operators-nbknp\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") " pod="openshift-marketplace/redhat-operators-nbknp" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.634825 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-utilities\") pod \"redhat-operators-nbknp\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") " pod="openshift-marketplace/redhat-operators-nbknp" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.669922 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-h96cn\" (UniqueName: \"kubernetes.io/projected/b029a865-4ea3-489c-86cc-efe895777abd-kube-api-access-h96cn\") pod \"redhat-operators-nbknp\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") " pod="openshift-marketplace/redhat-operators-nbknp" Nov 30 08:35:51 crc kubenswrapper[4941]: I1130 08:35:51.827909 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbknp" Nov 30 08:35:52 crc kubenswrapper[4941]: I1130 08:35:52.412060 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nbknp"] Nov 30 08:35:53 crc kubenswrapper[4941]: I1130 08:35:53.068065 4941 generic.go:334] "Generic (PLEG): container finished" podID="b029a865-4ea3-489c-86cc-efe895777abd" containerID="200756328c21a02bb7f7b1061f231270ec07f047393cd190e05a03c6da0ddd35" exitCode=0 Nov 30 08:35:53 crc kubenswrapper[4941]: I1130 08:35:53.068143 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbknp" event={"ID":"b029a865-4ea3-489c-86cc-efe895777abd","Type":"ContainerDied","Data":"200756328c21a02bb7f7b1061f231270ec07f047393cd190e05a03c6da0ddd35"} Nov 30 08:35:53 crc kubenswrapper[4941]: I1130 08:35:53.069521 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbknp" event={"ID":"b029a865-4ea3-489c-86cc-efe895777abd","Type":"ContainerStarted","Data":"35bab5d1526f0599d884727943bf2f2b5e903d712894a5415e84a7a7ea042831"} Nov 30 08:35:54 crc kubenswrapper[4941]: I1130 08:35:54.085402 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbknp" event={"ID":"b029a865-4ea3-489c-86cc-efe895777abd","Type":"ContainerStarted","Data":"77f851f73831bfa06955ce15ac2d22ba4e464cbf546a3e19ca1bddf1192ada97"} Nov 30 08:35:57 crc kubenswrapper[4941]: I1130 08:35:57.130686 4941 generic.go:334] "Generic (PLEG): container finished" podID="b029a865-4ea3-489c-86cc-efe895777abd" containerID="77f851f73831bfa06955ce15ac2d22ba4e464cbf546a3e19ca1bddf1192ada97" exitCode=0 Nov 30 08:35:57 crc kubenswrapper[4941]: I1130 08:35:57.130787 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbknp" event={"ID":"b029a865-4ea3-489c-86cc-efe895777abd","Type":"ContainerDied","Data":"77f851f73831bfa06955ce15ac2d22ba4e464cbf546a3e19ca1bddf1192ada97"} Nov 30 08:35:58 crc kubenswrapper[4941]: I1130 08:35:58.151171 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbknp" event={"ID":"b029a865-4ea3-489c-86cc-efe895777abd","Type":"ContainerStarted","Data":"c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13"} Nov 30 08:35:58 crc kubenswrapper[4941]: I1130 08:35:58.177386 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nbknp" podStartSLOduration=2.45899686 podStartE2EDuration="7.177352917s" podCreationTimestamp="2025-11-30 08:35:51 +0000 UTC" firstStartedPulling="2025-11-30 08:35:53.07108781 +0000 UTC m=+6573.839259419" lastFinishedPulling="2025-11-30 08:35:57.789443867 +0000 UTC m=+6578.557615476" observedRunningTime="2025-11-30 08:35:58.171557908 +0000 UTC m=+6578.939729517" watchObservedRunningTime="2025-11-30 08:35:58.177352917 +0000 UTC m=+6578.945524516" Nov 30 08:36:01 crc kubenswrapper[4941]: I1130 08:36:01.828371 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nbknp" Nov 30 
08:36:01 crc kubenswrapper[4941]: I1130 08:36:01.828971 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nbknp"
Nov 30 08:36:02 crc kubenswrapper[4941]: I1130 08:36:02.882989 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nbknp" podUID="b029a865-4ea3-489c-86cc-efe895777abd" containerName="registry-server" probeResult="failure" output=<
Nov 30 08:36:02 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s
Nov 30 08:36:02 crc kubenswrapper[4941]: >
Nov 30 08:36:11 crc kubenswrapper[4941]: I1130 08:36:11.907304 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nbknp"
Nov 30 08:36:11 crc kubenswrapper[4941]: I1130 08:36:11.994719 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nbknp"
Nov 30 08:36:13 crc kubenswrapper[4941]: I1130 08:36:13.222727 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nbknp"]
Nov 30 08:36:13 crc kubenswrapper[4941]: I1130 08:36:13.332835 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nbknp" podUID="b029a865-4ea3-489c-86cc-efe895777abd" containerName="registry-server" containerID="cri-o://c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13" gracePeriod=2
Nov 30 08:36:13 crc kubenswrapper[4941]: I1130 08:36:13.889458 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbknp"
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.010468 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-catalog-content\") pod \"b029a865-4ea3-489c-86cc-efe895777abd\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") "
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.010618 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-utilities\") pod \"b029a865-4ea3-489c-86cc-efe895777abd\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") "
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.010849 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h96cn\" (UniqueName: \"kubernetes.io/projected/b029a865-4ea3-489c-86cc-efe895777abd-kube-api-access-h96cn\") pod \"b029a865-4ea3-489c-86cc-efe895777abd\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") "
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.012020 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-utilities" (OuterVolumeSpecName: "utilities") pod "b029a865-4ea3-489c-86cc-efe895777abd" (UID: "b029a865-4ea3-489c-86cc-efe895777abd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.028690 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b029a865-4ea3-489c-86cc-efe895777abd-kube-api-access-h96cn" (OuterVolumeSpecName: "kube-api-access-h96cn") pod "b029a865-4ea3-489c-86cc-efe895777abd" (UID: "b029a865-4ea3-489c-86cc-efe895777abd"). InnerVolumeSpecName "kube-api-access-h96cn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.120545 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h96cn\" (UniqueName: \"kubernetes.io/projected/b029a865-4ea3-489c-86cc-efe895777abd-kube-api-access-h96cn\") on node \"crc\" DevicePath \"\""
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.120597 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.322568 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b029a865-4ea3-489c-86cc-efe895777abd" (UID: "b029a865-4ea3-489c-86cc-efe895777abd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.323249 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-catalog-content\") pod \"b029a865-4ea3-489c-86cc-efe895777abd\" (UID: \"b029a865-4ea3-489c-86cc-efe895777abd\") "
Nov 30 08:36:14 crc kubenswrapper[4941]: W1130 08:36:14.323475 4941 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/b029a865-4ea3-489c-86cc-efe895777abd/volumes/kubernetes.io~empty-dir/catalog-content
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.323488 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b029a865-4ea3-489c-86cc-efe895777abd" (UID: "b029a865-4ea3-489c-86cc-efe895777abd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.323969 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b029a865-4ea3-489c-86cc-efe895777abd-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.355586 4941 generic.go:334] "Generic (PLEG): container finished" podID="b029a865-4ea3-489c-86cc-efe895777abd" containerID="c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13" exitCode=0
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.355634 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbknp"
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.355659 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbknp" event={"ID":"b029a865-4ea3-489c-86cc-efe895777abd","Type":"ContainerDied","Data":"c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13"}
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.355732 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbknp" event={"ID":"b029a865-4ea3-489c-86cc-efe895777abd","Type":"ContainerDied","Data":"35bab5d1526f0599d884727943bf2f2b5e903d712894a5415e84a7a7ea042831"}
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.355754 4941 scope.go:117] "RemoveContainer" containerID="c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13"
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.382608 4941 scope.go:117] "RemoveContainer" containerID="77f851f73831bfa06955ce15ac2d22ba4e464cbf546a3e19ca1bddf1192ada97"
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.418767 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nbknp"]
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.421994 4941 scope.go:117] "RemoveContainer" containerID="200756328c21a02bb7f7b1061f231270ec07f047393cd190e05a03c6da0ddd35"
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.427179 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nbknp"]
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.459109 4941 scope.go:117] "RemoveContainer" containerID="c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13"
Nov 30 08:36:14 crc kubenswrapper[4941]: E1130 08:36:14.459659 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13\": container with ID starting with c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13 not found: ID does not exist" containerID="c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13"
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.459710 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13"} err="failed to get container status \"c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13\": rpc error: code = NotFound desc = could not find container \"c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13\": container with ID starting with c0656de2255622aefbf73aef611b70a5f89e64cf804cb55c13853c0bde06fd13 not found: ID does not exist"
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.459744 4941 scope.go:117] "RemoveContainer" containerID="77f851f73831bfa06955ce15ac2d22ba4e464cbf546a3e19ca1bddf1192ada97"
Nov 30 08:36:14 crc kubenswrapper[4941]: E1130 08:36:14.460028 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77f851f73831bfa06955ce15ac2d22ba4e464cbf546a3e19ca1bddf1192ada97\": container with ID starting with 77f851f73831bfa06955ce15ac2d22ba4e464cbf546a3e19ca1bddf1192ada97 not found: ID does not exist" containerID="77f851f73831bfa06955ce15ac2d22ba4e464cbf546a3e19ca1bddf1192ada97"
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.460068 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77f851f73831bfa06955ce15ac2d22ba4e464cbf546a3e19ca1bddf1192ada97"} err="failed to get container status \"77f851f73831bfa06955ce15ac2d22ba4e464cbf546a3e19ca1bddf1192ada97\": rpc error: code = NotFound desc = could not find container \"77f851f73831bfa06955ce15ac2d22ba4e464cbf546a3e19ca1bddf1192ada97\": container with ID starting with 77f851f73831bfa06955ce15ac2d22ba4e464cbf546a3e19ca1bddf1192ada97 not found: ID does not exist"
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.460118 4941 scope.go:117] "RemoveContainer" containerID="200756328c21a02bb7f7b1061f231270ec07f047393cd190e05a03c6da0ddd35"
Nov 30 08:36:14 crc kubenswrapper[4941]: E1130 08:36:14.460371 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"200756328c21a02bb7f7b1061f231270ec07f047393cd190e05a03c6da0ddd35\": container with ID starting with 200756328c21a02bb7f7b1061f231270ec07f047393cd190e05a03c6da0ddd35 not found: ID does not exist" containerID="200756328c21a02bb7f7b1061f231270ec07f047393cd190e05a03c6da0ddd35"
Nov 30 08:36:14 crc kubenswrapper[4941]: I1130 08:36:14.460396 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"200756328c21a02bb7f7b1061f231270ec07f047393cd190e05a03c6da0ddd35"} err="failed to get container status \"200756328c21a02bb7f7b1061f231270ec07f047393cd190e05a03c6da0ddd35\": rpc error: code = NotFound desc = could not find container \"200756328c21a02bb7f7b1061f231270ec07f047393cd190e05a03c6da0ddd35\": container with ID starting with 200756328c21a02bb7f7b1061f231270ec07f047393cd190e05a03c6da0ddd35 not found: ID does not exist"
Nov 30 08:36:14 crc kubenswrapper[4941]: E1130 08:36:14.535294 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb029a865_4ea3_489c_86cc_efe895777abd.slice/crio-35bab5d1526f0599d884727943bf2f2b5e903d712894a5415e84a7a7ea042831\": RecentStats: unable to find data in memory cache]"
Nov 30 08:36:15 crc kubenswrapper[4941]: I1130 08:36:15.552250 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b029a865-4ea3-489c-86cc-efe895777abd" path="/var/lib/kubelet/pods/b029a865-4ea3-489c-86cc-efe895777abd/volumes"
Nov 30 08:36:22 crc kubenswrapper[4941]: I1130 08:36:22.444276 4941 generic.go:334] "Generic (PLEG): container finished" podID="3215bae2-d62e-4687-bd5b-8d51625ba47c" containerID="df5910ee05d05c208eee6eeb7fee0fcb93c27b9374970d75f0ab5a0c8f7f00b8" exitCode=0
Nov 30 08:36:22 crc kubenswrapper[4941]: I1130 08:36:22.444380 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" event={"ID":"3215bae2-d62e-4687-bd5b-8d51625ba47c","Type":"ContainerDied","Data":"df5910ee05d05c208eee6eeb7fee0fcb93c27b9374970d75f0ab5a0c8f7f00b8"}
Nov 30 08:36:23 crc kubenswrapper[4941]: I1130 08:36:23.965847 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.080047 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvq64\" (UniqueName: \"kubernetes.io/projected/3215bae2-d62e-4687-bd5b-8d51625ba47c-kube-api-access-tvq64\") pod \"3215bae2-d62e-4687-bd5b-8d51625ba47c\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") "
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.080306 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-inventory\") pod \"3215bae2-d62e-4687-bd5b-8d51625ba47c\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") "
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.080441 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-ssh-key\") pod \"3215bae2-d62e-4687-bd5b-8d51625ba47c\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") "
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.080475 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-bootstrap-combined-ca-bundle\") pod \"3215bae2-d62e-4687-bd5b-8d51625ba47c\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") "
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.080540 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-ceph\") pod \"3215bae2-d62e-4687-bd5b-8d51625ba47c\" (UID: \"3215bae2-d62e-4687-bd5b-8d51625ba47c\") "
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.087438 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-ceph" (OuterVolumeSpecName: "ceph") pod "3215bae2-d62e-4687-bd5b-8d51625ba47c" (UID: "3215bae2-d62e-4687-bd5b-8d51625ba47c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.090041 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3215bae2-d62e-4687-bd5b-8d51625ba47c-kube-api-access-tvq64" (OuterVolumeSpecName: "kube-api-access-tvq64") pod "3215bae2-d62e-4687-bd5b-8d51625ba47c" (UID: "3215bae2-d62e-4687-bd5b-8d51625ba47c"). InnerVolumeSpecName "kube-api-access-tvq64". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.094558 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3215bae2-d62e-4687-bd5b-8d51625ba47c" (UID: "3215bae2-d62e-4687-bd5b-8d51625ba47c"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.119093 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3215bae2-d62e-4687-bd5b-8d51625ba47c" (UID: "3215bae2-d62e-4687-bd5b-8d51625ba47c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.141358 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-inventory" (OuterVolumeSpecName: "inventory") pod "3215bae2-d62e-4687-bd5b-8d51625ba47c" (UID: "3215bae2-d62e-4687-bd5b-8d51625ba47c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.186004 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-inventory\") on node \"crc\" DevicePath \"\""
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.186172 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.186254 4941 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.186351 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3215bae2-d62e-4687-bd5b-8d51625ba47c-ceph\") on node \"crc\" DevicePath \"\""
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.186448 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvq64\" (UniqueName: \"kubernetes.io/projected/3215bae2-d62e-4687-bd5b-8d51625ba47c-kube-api-access-tvq64\") on node \"crc\" DevicePath \"\""
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.474887 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n" event={"ID":"3215bae2-d62e-4687-bd5b-8d51625ba47c","Type":"ContainerDied","Data":"f84315bfea89cd0cea9a5d42a6e1558c04d1e8e0a9ef27587384374781be2aca"}
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.474964 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f84315bfea89cd0cea9a5d42a6e1558c04d1e8e0a9ef27587384374781be2aca"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.475039 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-qsd9n"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.589434 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-mmdxs"]
Nov 30 08:36:24 crc kubenswrapper[4941]: E1130 08:36:24.590124 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3215bae2-d62e-4687-bd5b-8d51625ba47c" containerName="bootstrap-openstack-openstack-cell1"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.590144 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3215bae2-d62e-4687-bd5b-8d51625ba47c" containerName="bootstrap-openstack-openstack-cell1"
Nov 30 08:36:24 crc kubenswrapper[4941]: E1130 08:36:24.590195 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b029a865-4ea3-489c-86cc-efe895777abd" containerName="extract-content"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.590203 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b029a865-4ea3-489c-86cc-efe895777abd" containerName="extract-content"
Nov 30 08:36:24 crc kubenswrapper[4941]: E1130 08:36:24.590222 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b029a865-4ea3-489c-86cc-efe895777abd" containerName="registry-server"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.590229 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b029a865-4ea3-489c-86cc-efe895777abd" containerName="registry-server"
Nov 30 08:36:24 crc kubenswrapper[4941]: E1130 08:36:24.590247 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b029a865-4ea3-489c-86cc-efe895777abd" containerName="extract-utilities"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.590254 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b029a865-4ea3-489c-86cc-efe895777abd" containerName="extract-utilities"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.590461 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3215bae2-d62e-4687-bd5b-8d51625ba47c" containerName="bootstrap-openstack-openstack-cell1"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.590491 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="b029a865-4ea3-489c-86cc-efe895777abd" containerName="registry-server"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.591485 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.596841 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.596910 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.598059 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-mmdxs"]
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.600206 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.607153 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.710294 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-inventory\") pod \"download-cache-openstack-openstack-cell1-mmdxs\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") " pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.710501 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-ceph\") pod \"download-cache-openstack-openstack-cell1-mmdxs\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") " pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.710824 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-ssh-key\") pod \"download-cache-openstack-openstack-cell1-mmdxs\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") " pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.710996 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfvc4\" (UniqueName: \"kubernetes.io/projected/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-kube-api-access-dfvc4\") pod \"download-cache-openstack-openstack-cell1-mmdxs\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") " pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.812987 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-ceph\") pod \"download-cache-openstack-openstack-cell1-mmdxs\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") " pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.813554 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-ssh-key\") pod \"download-cache-openstack-openstack-cell1-mmdxs\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") " pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.813731 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfvc4\" (UniqueName: \"kubernetes.io/projected/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-kube-api-access-dfvc4\") pod \"download-cache-openstack-openstack-cell1-mmdxs\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") " pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.814018 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-inventory\") pod \"download-cache-openstack-openstack-cell1-mmdxs\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") " pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.822990 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-ceph\") pod \"download-cache-openstack-openstack-cell1-mmdxs\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") " pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.825485 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-inventory\") pod \"download-cache-openstack-openstack-cell1-mmdxs\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") " pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.829779 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-ssh-key\") pod \"download-cache-openstack-openstack-cell1-mmdxs\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") " pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.871604 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfvc4\" (UniqueName: \"kubernetes.io/projected/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-kube-api-access-dfvc4\") pod \"download-cache-openstack-openstack-cell1-mmdxs\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") " pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:24 crc kubenswrapper[4941]: I1130 08:36:24.930881 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:36:25 crc kubenswrapper[4941]: I1130 08:36:25.649554 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-mmdxs"]
Nov 30 08:36:26 crc kubenswrapper[4941]: I1130 08:36:26.498653 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-mmdxs" event={"ID":"00aafada-6a3d-4ea6-8e0d-e7090d7acd63","Type":"ContainerStarted","Data":"003af0f5fdfafa567a043ae8588d5632195e64eba1f35149f25667515411aba2"}
Nov 30 08:36:27 crc kubenswrapper[4941]: I1130 08:36:27.510243 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-mmdxs" event={"ID":"00aafada-6a3d-4ea6-8e0d-e7090d7acd63","Type":"ContainerStarted","Data":"c0f836a821f4bfcc8996d22fb6fa7b2ebfaddcbd4660a8f66383f4fb2611b109"}
Nov 30 08:36:27 crc kubenswrapper[4941]: I1130 08:36:27.539100 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-mmdxs" podStartSLOduration=2.921880584 podStartE2EDuration="3.539078652s" podCreationTimestamp="2025-11-30 08:36:24 +0000 UTC" firstStartedPulling="2025-11-30 08:36:25.655949874 +0000 UTC m=+6606.424121483" lastFinishedPulling="2025-11-30 08:36:26.273147942 +0000 UTC m=+6607.041319551" observedRunningTime="2025-11-30 08:36:27.528677991 +0000 UTC m=+6608.296849610" watchObservedRunningTime="2025-11-30 08:36:27.539078652 +0000 UTC m=+6608.307250261"
Nov 30 08:37:32 crc kubenswrapper[4941]: I1130 08:37:32.979200 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:37:32 crc kubenswrapper[4941]: I1130 08:37:32.980250 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:38:02 crc kubenswrapper[4941]: I1130 08:38:02.978669 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:38:02 crc kubenswrapper[4941]: I1130 08:38:02.980528 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:38:04 crc kubenswrapper[4941]: I1130 08:38:04.776949 4941 generic.go:334] "Generic (PLEG): container finished" podID="00aafada-6a3d-4ea6-8e0d-e7090d7acd63" containerID="c0f836a821f4bfcc8996d22fb6fa7b2ebfaddcbd4660a8f66383f4fb2611b109" exitCode=0
Nov 30 08:38:04 crc kubenswrapper[4941]: I1130 08:38:04.777054 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-mmdxs" event={"ID":"00aafada-6a3d-4ea6-8e0d-e7090d7acd63","Type":"ContainerDied","Data":"c0f836a821f4bfcc8996d22fb6fa7b2ebfaddcbd4660a8f66383f4fb2611b109"}
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.375348 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.515242 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfvc4\" (UniqueName: \"kubernetes.io/projected/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-kube-api-access-dfvc4\") pod \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") "
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.515605 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-inventory\") pod \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") "
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.515682 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-ceph\") pod \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") "
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.515826 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-ssh-key\") pod \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\" (UID: \"00aafada-6a3d-4ea6-8e0d-e7090d7acd63\") "
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.522356 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-kube-api-access-dfvc4" (OuterVolumeSpecName: "kube-api-access-dfvc4") pod "00aafada-6a3d-4ea6-8e0d-e7090d7acd63" (UID: "00aafada-6a3d-4ea6-8e0d-e7090d7acd63"). InnerVolumeSpecName "kube-api-access-dfvc4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.522733 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-ceph" (OuterVolumeSpecName: "ceph") pod "00aafada-6a3d-4ea6-8e0d-e7090d7acd63" (UID: "00aafada-6a3d-4ea6-8e0d-e7090d7acd63"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.549079 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "00aafada-6a3d-4ea6-8e0d-e7090d7acd63" (UID: "00aafada-6a3d-4ea6-8e0d-e7090d7acd63"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.573594 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-inventory" (OuterVolumeSpecName: "inventory") pod "00aafada-6a3d-4ea6-8e0d-e7090d7acd63" (UID: "00aafada-6a3d-4ea6-8e0d-e7090d7acd63"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.618350 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.618387 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfvc4\" (UniqueName: \"kubernetes.io/projected/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-kube-api-access-dfvc4\") on node \"crc\" DevicePath \"\""
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.618403 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-inventory\") on node \"crc\" DevicePath \"\""
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.618413 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/00aafada-6a3d-4ea6-8e0d-e7090d7acd63-ceph\") on node \"crc\" DevicePath \"\""
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.806430 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-mmdxs" event={"ID":"00aafada-6a3d-4ea6-8e0d-e7090d7acd63","Type":"ContainerDied","Data":"003af0f5fdfafa567a043ae8588d5632195e64eba1f35149f25667515411aba2"}
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.806484 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="003af0f5fdfafa567a043ae8588d5632195e64eba1f35149f25667515411aba2"
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.806553 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-mmdxs"
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.908672 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-dc8gw"]
Nov 30 08:38:06 crc kubenswrapper[4941]: E1130 08:38:06.909389 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00aafada-6a3d-4ea6-8e0d-e7090d7acd63" containerName="download-cache-openstack-openstack-cell1"
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.909411 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="00aafada-6a3d-4ea6-8e0d-e7090d7acd63" containerName="download-cache-openstack-openstack-cell1"
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.909742 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="00aafada-6a3d-4ea6-8e0d-e7090d7acd63" containerName="download-cache-openstack-openstack-cell1"
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.911014 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.915095 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.916060 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.916343 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6"
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.918386 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Nov 30 08:38:06 crc kubenswrapper[4941]: I1130 08:38:06.956467 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-dc8gw"]
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.031939 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-ceph\") pod \"configure-network-openstack-openstack-cell1-dc8gw\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") " pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.032761 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-inventory\") pod \"configure-network-openstack-openstack-cell1-dc8gw\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") " pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.032875 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-ssh-key\") pod \"configure-network-openstack-openstack-cell1-dc8gw\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") " pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.032968 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdh9h\" (UniqueName: \"kubernetes.io/projected/e432e891-b748-4cee-a941-553e6f7d6140-kube-api-access-tdh9h\") pod \"configure-network-openstack-openstack-cell1-dc8gw\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") " pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.135826 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-ceph\") pod \"configure-network-openstack-openstack-cell1-dc8gw\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") " pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.135893 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-inventory\") pod \"configure-network-openstack-openstack-cell1-dc8gw\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") " pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.135944 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-ssh-key\") pod \"configure-network-openstack-openstack-cell1-dc8gw\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") " pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.136005 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdh9h\" (UniqueName: \"kubernetes.io/projected/e432e891-b748-4cee-a941-553e6f7d6140-kube-api-access-tdh9h\") pod \"configure-network-openstack-openstack-cell1-dc8gw\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") " pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.147074 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-ssh-key\") pod \"configure-network-openstack-openstack-cell1-dc8gw\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") " pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.147219 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-ceph\") pod \"configure-network-openstack-openstack-cell1-dc8gw\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") " pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.151088 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-inventory\") pod \"configure-network-openstack-openstack-cell1-dc8gw\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") " pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.158105 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdh9h\" (UniqueName: \"kubernetes.io/projected/e432e891-b748-4cee-a941-553e6f7d6140-kube-api-access-tdh9h\") pod \"configure-network-openstack-openstack-cell1-dc8gw\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") " pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.229585 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.817539 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-dc8gw"]
Nov 30 08:38:07 crc kubenswrapper[4941]: I1130 08:38:07.837581 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 30 08:38:08 crc kubenswrapper[4941]: I1130 08:38:08.827025 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-dc8gw" event={"ID":"e432e891-b748-4cee-a941-553e6f7d6140","Type":"ContainerStarted","Data":"9bd0bd66143695ac386c8e7e60c8fb10c3a5acd7a1b421bee62ea3061ae0ed5c"}
Nov 30 08:38:08 crc kubenswrapper[4941]: I1130 08:38:08.827446 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-dc8gw" event={"ID":"e432e891-b748-4cee-a941-553e6f7d6140","Type":"ContainerStarted","Data":"874e2fda1aeac022c806db6484ac8486a2b83e272ec8b6f719a2fe8c0c87d4f7"}
Nov 30 08:38:08 crc kubenswrapper[4941]: I1130 08:38:08.853005 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-dc8gw" podStartSLOduration=2.242494308 podStartE2EDuration="2.852982459s" podCreationTimestamp="2025-11-30 08:38:06 +0000 UTC" firstStartedPulling="2025-11-30 08:38:07.836777588 +0000 UTC m=+6708.604949197" lastFinishedPulling="2025-11-30 08:38:08.447265729 +0000 UTC m=+6709.215437348" observedRunningTime="2025-11-30 08:38:08.844294011 +0000 UTC m=+6709.612465620" watchObservedRunningTime="2025-11-30 08:38:08.852982459 +0000 UTC m=+6709.621154068"
Nov 30 08:38:32 crc kubenswrapper[4941]: I1130 08:38:32.978712 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:38:32 crc kubenswrapper[4941]: I1130 08:38:32.979596 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:38:32 crc kubenswrapper[4941]: I1130 08:38:32.979686 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 08:38:32 crc kubenswrapper[4941]: I1130 08:38:32.981365 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 08:38:32 crc kubenswrapper[4941]: I1130 08:38:32.981461 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" gracePeriod=600
Nov 30 08:38:33 crc kubenswrapper[4941]: E1130 08:38:33.115626 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:38:33 crc kubenswrapper[4941]: I1130 08:38:33.171926 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" exitCode=0
Nov 30 08:38:33 crc kubenswrapper[4941]: I1130 08:38:33.172505 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499"}
Nov 30 08:38:33 crc kubenswrapper[4941]: I1130 08:38:33.172787 4941 scope.go:117] "RemoveContainer" containerID="24bc20ae8ebb5e63f354521469f692ee4d9f3f6a51f811a3e50661d6460945e1"
Nov 30 08:38:33 crc kubenswrapper[4941]: I1130 08:38:33.173802 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499"
Nov 30 08:38:33 crc kubenswrapper[4941]: E1130 08:38:33.174179 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.356607 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-c6z7d"]
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.361519 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.373838 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6z7d"]
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.542226 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-utilities\") pod \"redhat-marketplace-c6z7d\" (UID: \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\") " pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.542713 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9k86\" (UniqueName: \"kubernetes.io/projected/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-kube-api-access-c9k86\") pod \"redhat-marketplace-c6z7d\" (UID: \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\") " pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.543858 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-catalog-content\") pod \"redhat-marketplace-c6z7d\" (UID: \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\") " pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.646124 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-catalog-content\") pod \"redhat-marketplace-c6z7d\" (UID: \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\") " pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.646208 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-utilities\") pod \"redhat-marketplace-c6z7d\" (UID: \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\") " pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.646277 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9k86\" (UniqueName: \"kubernetes.io/projected/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-kube-api-access-c9k86\") pod \"redhat-marketplace-c6z7d\" (UID: \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\") " pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.647397 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-catalog-content\") pod \"redhat-marketplace-c6z7d\" (UID: \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\") " pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.648150 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-utilities\") pod \"redhat-marketplace-c6z7d\" (UID: \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\") " pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.669752 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9k86\" (UniqueName: \"kubernetes.io/projected/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-kube-api-access-c9k86\") pod \"redhat-marketplace-c6z7d\" (UID: \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\") " pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:42 crc kubenswrapper[4941]: I1130 08:38:42.693563 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:43 crc kubenswrapper[4941]: I1130 08:38:43.200076 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6z7d"]
Nov 30 08:38:43 crc kubenswrapper[4941]: I1130 08:38:43.356227 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6z7d" event={"ID":"f108fd79-bae0-4e8d-aad1-48b58f28eaa8","Type":"ContainerStarted","Data":"2cd923ae8304fed66182ccdd7b33eb57857484ee988640a78eaaf3a6bee566be"}
Nov 30 08:38:44 crc kubenswrapper[4941]: I1130 08:38:44.373723 4941 generic.go:334] "Generic (PLEG): container finished" podID="f108fd79-bae0-4e8d-aad1-48b58f28eaa8" containerID="88cb0080ae1f8275165f7e7f460467abef8def91cf55bf9f419bc881bb271383" exitCode=0
Nov 30 08:38:44 crc kubenswrapper[4941]: I1130 08:38:44.373824 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6z7d" event={"ID":"f108fd79-bae0-4e8d-aad1-48b58f28eaa8","Type":"ContainerDied","Data":"88cb0080ae1f8275165f7e7f460467abef8def91cf55bf9f419bc881bb271383"}
Nov 30 08:38:46 crc kubenswrapper[4941]: I1130 08:38:46.403346 4941 generic.go:334] "Generic (PLEG): container finished" podID="f108fd79-bae0-4e8d-aad1-48b58f28eaa8" containerID="c3706227fdde033d01820cfd11277de5dfafb5d4af1cd6311ee75fefa3146e22" exitCode=0
Nov 30 08:38:46 crc kubenswrapper[4941]: I1130 08:38:46.403488 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6z7d" event={"ID":"f108fd79-bae0-4e8d-aad1-48b58f28eaa8","Type":"ContainerDied","Data":"c3706227fdde033d01820cfd11277de5dfafb5d4af1cd6311ee75fefa3146e22"}
Nov 30 08:38:47 crc kubenswrapper[4941]: I1130 08:38:47.421917 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6z7d" event={"ID":"f108fd79-bae0-4e8d-aad1-48b58f28eaa8","Type":"ContainerStarted","Data":"84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f"}
Nov 30 08:38:47 crc kubenswrapper[4941]: I1130 08:38:47.465642 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-c6z7d" podStartSLOduration=2.8928538489999998 podStartE2EDuration="5.465604484s" podCreationTimestamp="2025-11-30 08:38:42 +0000 UTC" firstStartedPulling="2025-11-30 08:38:44.378741877 +0000 UTC m=+6745.146913526" lastFinishedPulling="2025-11-30 08:38:46.951492522 +0000 UTC m=+6747.719664161" observedRunningTime="2025-11-30 08:38:47.444825531 +0000 UTC m=+6748.212997130" watchObservedRunningTime="2025-11-30 08:38:47.465604484 +0000 UTC m=+6748.233776123"
Nov 30 08:38:48 crc kubenswrapper[4941]: I1130 08:38:48.522131 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499"
Nov 30 08:38:48 crc kubenswrapper[4941]: E1130 08:38:48.523166 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:38:52 crc kubenswrapper[4941]: I1130 08:38:52.694189 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:52 crc kubenswrapper[4941]: I1130 08:38:52.695194 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:52 crc kubenswrapper[4941]: I1130 08:38:52.775223 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:53 crc kubenswrapper[4941]: I1130 08:38:53.549463 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:53 crc kubenswrapper[4941]: I1130 08:38:53.602698 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6z7d"]
Nov 30 08:38:55 crc kubenswrapper[4941]: I1130 08:38:55.513287 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-c6z7d" podUID="f108fd79-bae0-4e8d-aad1-48b58f28eaa8" containerName="registry-server" containerID="cri-o://84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f" gracePeriod=2
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.015109 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.150224 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-utilities\") pod \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\" (UID: \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\") "
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.150272 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-catalog-content\") pod \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\" (UID: \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\") "
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.150378 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9k86\" (UniqueName: \"kubernetes.io/projected/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-kube-api-access-c9k86\") pod \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\" (UID: \"f108fd79-bae0-4e8d-aad1-48b58f28eaa8\") "
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.152800 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-utilities" (OuterVolumeSpecName: "utilities") pod "f108fd79-bae0-4e8d-aad1-48b58f28eaa8" (UID: "f108fd79-bae0-4e8d-aad1-48b58f28eaa8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.158519 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-kube-api-access-c9k86" (OuterVolumeSpecName: "kube-api-access-c9k86") pod "f108fd79-bae0-4e8d-aad1-48b58f28eaa8" (UID: "f108fd79-bae0-4e8d-aad1-48b58f28eaa8"). InnerVolumeSpecName "kube-api-access-c9k86". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.178755 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f108fd79-bae0-4e8d-aad1-48b58f28eaa8" (UID: "f108fd79-bae0-4e8d-aad1-48b58f28eaa8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.254344 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.254645 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.254767 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9k86\" (UniqueName: \"kubernetes.io/projected/f108fd79-bae0-4e8d-aad1-48b58f28eaa8-kube-api-access-c9k86\") on node \"crc\" DevicePath \"\""
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.537526 4941 generic.go:334] "Generic (PLEG): container finished" podID="f108fd79-bae0-4e8d-aad1-48b58f28eaa8" containerID="84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f" exitCode=0
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.537604 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6z7d" event={"ID":"f108fd79-bae0-4e8d-aad1-48b58f28eaa8","Type":"ContainerDied","Data":"84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f"}
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.537650 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c6z7d"
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.537707 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6z7d" event={"ID":"f108fd79-bae0-4e8d-aad1-48b58f28eaa8","Type":"ContainerDied","Data":"2cd923ae8304fed66182ccdd7b33eb57857484ee988640a78eaaf3a6bee566be"}
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.537801 4941 scope.go:117] "RemoveContainer" containerID="84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f"
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.599742 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6z7d"]
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.604920 4941 scope.go:117] "RemoveContainer" containerID="c3706227fdde033d01820cfd11277de5dfafb5d4af1cd6311ee75fefa3146e22"
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.614135 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6z7d"]
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.645070 4941 scope.go:117] "RemoveContainer" containerID="88cb0080ae1f8275165f7e7f460467abef8def91cf55bf9f419bc881bb271383"
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.701208 4941 scope.go:117] "RemoveContainer" containerID="84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f"
Nov 30 08:38:56 crc kubenswrapper[4941]: E1130 08:38:56.702533 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f\": container with ID starting with 84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f not found: ID does not exist" containerID="84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f"
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.702626 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f"} err="failed to get container status \"84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f\": rpc error: code = NotFound desc = could not find container \"84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f\": container with ID starting with 84ddccf16e5e3435b593e447b9a604c761ef5419ac8785b60eb63c0032b1f43f not found: ID does not exist"
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.702760 4941 scope.go:117] "RemoveContainer" containerID="c3706227fdde033d01820cfd11277de5dfafb5d4af1cd6311ee75fefa3146e22"
Nov 30 08:38:56 crc kubenswrapper[4941]: E1130 08:38:56.703230 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3706227fdde033d01820cfd11277de5dfafb5d4af1cd6311ee75fefa3146e22\": container with ID starting with c3706227fdde033d01820cfd11277de5dfafb5d4af1cd6311ee75fefa3146e22 not found: ID does not exist" containerID="c3706227fdde033d01820cfd11277de5dfafb5d4af1cd6311ee75fefa3146e22"
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.703369 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3706227fdde033d01820cfd11277de5dfafb5d4af1cd6311ee75fefa3146e22"} err="failed to get container status \"c3706227fdde033d01820cfd11277de5dfafb5d4af1cd6311ee75fefa3146e22\": rpc error: code = NotFound desc = could not find container \"c3706227fdde033d01820cfd11277de5dfafb5d4af1cd6311ee75fefa3146e22\": container with ID starting with c3706227fdde033d01820cfd11277de5dfafb5d4af1cd6311ee75fefa3146e22 not found: ID does not exist"
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.703459 4941 scope.go:117] "RemoveContainer" containerID="88cb0080ae1f8275165f7e7f460467abef8def91cf55bf9f419bc881bb271383"
Nov 30 08:38:56 crc kubenswrapper[4941]: E1130 08:38:56.703911 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88cb0080ae1f8275165f7e7f460467abef8def91cf55bf9f419bc881bb271383\": container with ID starting with 88cb0080ae1f8275165f7e7f460467abef8def91cf55bf9f419bc881bb271383 not found: ID does not exist" containerID="88cb0080ae1f8275165f7e7f460467abef8def91cf55bf9f419bc881bb271383"
Nov 30 08:38:56 crc kubenswrapper[4941]: I1130 08:38:56.703951 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88cb0080ae1f8275165f7e7f460467abef8def91cf55bf9f419bc881bb271383"} err="failed to get container status \"88cb0080ae1f8275165f7e7f460467abef8def91cf55bf9f419bc881bb271383\": rpc error: code = NotFound desc = could not find container \"88cb0080ae1f8275165f7e7f460467abef8def91cf55bf9f419bc881bb271383\": container with ID starting with 88cb0080ae1f8275165f7e7f460467abef8def91cf55bf9f419bc881bb271383 not found: ID does not exist"
Nov 30 08:38:57 crc kubenswrapper[4941]: I1130 08:38:57.542481 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f108fd79-bae0-4e8d-aad1-48b58f28eaa8" path="/var/lib/kubelet/pods/f108fd79-bae0-4e8d-aad1-48b58f28eaa8/volumes"
Nov 30 08:39:00 crc kubenswrapper[4941]: I1130 08:39:00.522215 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499"
Nov 30 08:39:00 crc kubenswrapper[4941]: E1130 08:39:00.525548 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:39:11 crc kubenswrapper[4941]: I1130 08:39:11.522395 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499"
Nov 30 08:39:11 crc kubenswrapper[4941]: E1130 08:39:11.523896 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:39:26 crc kubenswrapper[4941]: I1130 08:39:26.522473 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499"
Nov 30 08:39:26 crc kubenswrapper[4941]: E1130 08:39:26.523570 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:39:30 crc kubenswrapper[4941]: E1130 08:39:30.730507 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode432e891_b748_4cee_a941_553e6f7d6140.slice/crio-9bd0bd66143695ac386c8e7e60c8fb10c3a5acd7a1b421bee62ea3061ae0ed5c.scope\": RecentStats: unable to find data in memory cache]"
Nov 30 08:39:31 crc kubenswrapper[4941]: I1130 08:39:31.005362 4941 generic.go:334] "Generic (PLEG): container finished" podID="e432e891-b748-4cee-a941-553e6f7d6140" containerID="9bd0bd66143695ac386c8e7e60c8fb10c3a5acd7a1b421bee62ea3061ae0ed5c" exitCode=0
Nov 30 08:39:31 crc kubenswrapper[4941]: I1130 08:39:31.005502 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-dc8gw" event={"ID":"e432e891-b748-4cee-a941-553e6f7d6140","Type":"ContainerDied","Data":"9bd0bd66143695ac386c8e7e60c8fb10c3a5acd7a1b421bee62ea3061ae0ed5c"}
Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.620877 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-dc8gw"
Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.731296 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdh9h\" (UniqueName: \"kubernetes.io/projected/e432e891-b748-4cee-a941-553e6f7d6140-kube-api-access-tdh9h\") pod \"e432e891-b748-4cee-a941-553e6f7d6140\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") "
Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.731489 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-inventory\") pod \"e432e891-b748-4cee-a941-553e6f7d6140\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") "
Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.731517 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-ssh-key\") pod \"e432e891-b748-4cee-a941-553e6f7d6140\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") "
Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.731726 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-ceph\") pod \"e432e891-b748-4cee-a941-553e6f7d6140\" (UID: \"e432e891-b748-4cee-a941-553e6f7d6140\") "
Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.738257 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-ceph" (OuterVolumeSpecName: "ceph") pod "e432e891-b748-4cee-a941-553e6f7d6140" (UID: "e432e891-b748-4cee-a941-553e6f7d6140"). InnerVolumeSpecName "ceph".
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.740239 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e432e891-b748-4cee-a941-553e6f7d6140-kube-api-access-tdh9h" (OuterVolumeSpecName: "kube-api-access-tdh9h") pod "e432e891-b748-4cee-a941-553e6f7d6140" (UID: "e432e891-b748-4cee-a941-553e6f7d6140"). InnerVolumeSpecName "kube-api-access-tdh9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.766072 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e432e891-b748-4cee-a941-553e6f7d6140" (UID: "e432e891-b748-4cee-a941-553e6f7d6140"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.766570 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-inventory" (OuterVolumeSpecName: "inventory") pod "e432e891-b748-4cee-a941-553e6f7d6140" (UID: "e432e891-b748-4cee-a941-553e6f7d6140"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.834313 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdh9h\" (UniqueName: \"kubernetes.io/projected/e432e891-b748-4cee-a941-553e6f7d6140-kube-api-access-tdh9h\") on node \"crc\" DevicePath \"\"" Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.834956 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.835040 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:39:32 crc kubenswrapper[4941]: I1130 08:39:32.835137 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e432e891-b748-4cee-a941-553e6f7d6140-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.027569 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-dc8gw" event={"ID":"e432e891-b748-4cee-a941-553e6f7d6140","Type":"ContainerDied","Data":"874e2fda1aeac022c806db6484ac8486a2b83e272ec8b6f719a2fe8c0c87d4f7"} Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.027615 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="874e2fda1aeac022c806db6484ac8486a2b83e272ec8b6f719a2fe8c0c87d4f7" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.027657 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-dc8gw" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.142064 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-b9kqh"] Nov 30 08:39:33 crc kubenswrapper[4941]: E1130 08:39:33.142554 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f108fd79-bae0-4e8d-aad1-48b58f28eaa8" containerName="extract-content" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.142572 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f108fd79-bae0-4e8d-aad1-48b58f28eaa8" containerName="extract-content" Nov 30 08:39:33 crc kubenswrapper[4941]: E1130 08:39:33.142598 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f108fd79-bae0-4e8d-aad1-48b58f28eaa8" containerName="registry-server" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.142606 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f108fd79-bae0-4e8d-aad1-48b58f28eaa8" containerName="registry-server" Nov 30 08:39:33 crc kubenswrapper[4941]: E1130 08:39:33.142632 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f108fd79-bae0-4e8d-aad1-48b58f28eaa8" containerName="extract-utilities" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.142642 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f108fd79-bae0-4e8d-aad1-48b58f28eaa8" containerName="extract-utilities" Nov 30 08:39:33 crc kubenswrapper[4941]: E1130 08:39:33.142655 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e432e891-b748-4cee-a941-553e6f7d6140" containerName="configure-network-openstack-openstack-cell1" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.142663 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e432e891-b748-4cee-a941-553e6f7d6140" containerName="configure-network-openstack-openstack-cell1" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.142886 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e432e891-b748-4cee-a941-553e6f7d6140" containerName="configure-network-openstack-openstack-cell1" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.142912 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f108fd79-bae0-4e8d-aad1-48b58f28eaa8" containerName="registry-server" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.143764 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.145694 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.146676 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.146821 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.147649 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.166120 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-b9kqh"] Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.246015 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-ceph\") pod \"validate-network-openstack-openstack-cell1-b9kqh\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.246173 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-ssh-key\") pod \"validate-network-openstack-openstack-cell1-b9kqh\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.246243 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br9gg\" (UniqueName: \"kubernetes.io/projected/06030610-8d39-469d-8cc8-ec8dfe976e23-kube-api-access-br9gg\") pod \"validate-network-openstack-openstack-cell1-b9kqh\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.246321 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-inventory\") pod \"validate-network-openstack-openstack-cell1-b9kqh\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.348105 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-ssh-key\") pod \"validate-network-openstack-openstack-cell1-b9kqh\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.348360 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br9gg\" (UniqueName: \"kubernetes.io/projected/06030610-8d39-469d-8cc8-ec8dfe976e23-kube-api-access-br9gg\") pod \"validate-network-openstack-openstack-cell1-b9kqh\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " 
pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.348623 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-inventory\") pod \"validate-network-openstack-openstack-cell1-b9kqh\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.348827 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-ceph\") pod \"validate-network-openstack-openstack-cell1-b9kqh\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.354428 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-ceph\") pod \"validate-network-openstack-openstack-cell1-b9kqh\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.355997 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-inventory\") pod \"validate-network-openstack-openstack-cell1-b9kqh\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.356499 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-ssh-key\") pod \"validate-network-openstack-openstack-cell1-b9kqh\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.371950 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br9gg\" (UniqueName: \"kubernetes.io/projected/06030610-8d39-469d-8cc8-ec8dfe976e23-kube-api-access-br9gg\") pod \"validate-network-openstack-openstack-cell1-b9kqh\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:33 crc kubenswrapper[4941]: I1130 08:39:33.461557 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:34 crc kubenswrapper[4941]: I1130 08:39:34.050769 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-b9kqh"] Nov 30 08:39:35 crc kubenswrapper[4941]: I1130 08:39:35.083750 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" event={"ID":"06030610-8d39-469d-8cc8-ec8dfe976e23","Type":"ContainerStarted","Data":"159bac9419e36a88e176be3d75aec9e7379d71a53b6234134e909b758931c3b5"} Nov 30 08:39:35 crc kubenswrapper[4941]: I1130 08:39:35.084720 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" event={"ID":"06030610-8d39-469d-8cc8-ec8dfe976e23","Type":"ContainerStarted","Data":"2994dbdff0260e09cca4f2b0a9518a0f290a3f5a5433c259ec6db459d98fa372"} Nov 30 08:39:35 crc kubenswrapper[4941]: I1130 08:39:35.122164 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" podStartSLOduration=1.636269717 podStartE2EDuration="2.122141865s" podCreationTimestamp="2025-11-30 08:39:33 +0000 UTC" firstStartedPulling="2025-11-30 08:39:34.050658505 +0000 UTC m=+6794.818830114" lastFinishedPulling="2025-11-30 08:39:34.536530653 +0000 UTC m=+6795.304702262" observedRunningTime="2025-11-30 08:39:35.114785348 +0000 UTC m=+6795.882956947" watchObservedRunningTime="2025-11-30 08:39:35.122141865 +0000 UTC m=+6795.890313484" Nov 30 08:39:40 crc kubenswrapper[4941]: I1130 08:39:40.174191 4941 generic.go:334] "Generic (PLEG): container finished" podID="06030610-8d39-469d-8cc8-ec8dfe976e23" containerID="159bac9419e36a88e176be3d75aec9e7379d71a53b6234134e909b758931c3b5" exitCode=0 Nov 30 08:39:40 crc kubenswrapper[4941]: I1130 08:39:40.174936 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" event={"ID":"06030610-8d39-469d-8cc8-ec8dfe976e23","Type":"ContainerDied","Data":"159bac9419e36a88e176be3d75aec9e7379d71a53b6234134e909b758931c3b5"} Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.523276 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:39:41 crc kubenswrapper[4941]: E1130 08:39:41.524032 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.647474 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.759494 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-inventory\") pod \"06030610-8d39-469d-8cc8-ec8dfe976e23\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.759639 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-ssh-key\") pod \"06030610-8d39-469d-8cc8-ec8dfe976e23\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.759859 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br9gg\" (UniqueName: \"kubernetes.io/projected/06030610-8d39-469d-8cc8-ec8dfe976e23-kube-api-access-br9gg\") pod \"06030610-8d39-469d-8cc8-ec8dfe976e23\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.759898 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-ceph\") pod \"06030610-8d39-469d-8cc8-ec8dfe976e23\" (UID: \"06030610-8d39-469d-8cc8-ec8dfe976e23\") " Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.765510 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-ceph" (OuterVolumeSpecName: "ceph") pod "06030610-8d39-469d-8cc8-ec8dfe976e23" (UID: "06030610-8d39-469d-8cc8-ec8dfe976e23"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.765624 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06030610-8d39-469d-8cc8-ec8dfe976e23-kube-api-access-br9gg" (OuterVolumeSpecName: "kube-api-access-br9gg") pod "06030610-8d39-469d-8cc8-ec8dfe976e23" (UID: "06030610-8d39-469d-8cc8-ec8dfe976e23"). InnerVolumeSpecName "kube-api-access-br9gg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.789519 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "06030610-8d39-469d-8cc8-ec8dfe976e23" (UID: "06030610-8d39-469d-8cc8-ec8dfe976e23"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.789751 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-inventory" (OuterVolumeSpecName: "inventory") pod "06030610-8d39-469d-8cc8-ec8dfe976e23" (UID: "06030610-8d39-469d-8cc8-ec8dfe976e23"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.862588 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br9gg\" (UniqueName: \"kubernetes.io/projected/06030610-8d39-469d-8cc8-ec8dfe976e23-kube-api-access-br9gg\") on node \"crc\" DevicePath \"\"" Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.862637 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.862650 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 08:39:41 crc kubenswrapper[4941]: I1130 08:39:41.862664 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06030610-8d39-469d-8cc8-ec8dfe976e23-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.195523 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" event={"ID":"06030610-8d39-469d-8cc8-ec8dfe976e23","Type":"ContainerDied","Data":"2994dbdff0260e09cca4f2b0a9518a0f290a3f5a5433c259ec6db459d98fa372"} Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.195571 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2994dbdff0260e09cca4f2b0a9518a0f290a3f5a5433c259ec6db459d98fa372" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.195929 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-b9kqh" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.292753 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-qj956"] Nov 30 08:39:42 crc kubenswrapper[4941]: E1130 08:39:42.293259 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06030610-8d39-469d-8cc8-ec8dfe976e23" containerName="validate-network-openstack-openstack-cell1" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.293283 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="06030610-8d39-469d-8cc8-ec8dfe976e23" containerName="validate-network-openstack-openstack-cell1" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.293592 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="06030610-8d39-469d-8cc8-ec8dfe976e23" containerName="validate-network-openstack-openstack-cell1" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.294514 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.298346 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.298640 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.298436 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.298527 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.351266 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-qj956"] Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.383213 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-ssh-key\") pod \"install-os-openstack-openstack-cell1-qj956\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.383346 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-inventory\") pod \"install-os-openstack-openstack-cell1-qj956\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.383429 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-ceph\") pod \"install-os-openstack-openstack-cell1-qj956\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.383466 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hdfz\" (UniqueName: \"kubernetes.io/projected/716f9427-4add-453c-9d69-f40949a5ee12-kube-api-access-9hdfz\") pod \"install-os-openstack-openstack-cell1-qj956\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.486217 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-ssh-key\") pod \"install-os-openstack-openstack-cell1-qj956\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.486315 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-inventory\") pod \"install-os-openstack-openstack-cell1-qj956\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.486386 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-ceph\") pod \"install-os-openstack-openstack-cell1-qj956\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.486412 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hdfz\" (UniqueName: \"kubernetes.io/projected/716f9427-4add-453c-9d69-f40949a5ee12-kube-api-access-9hdfz\") pod \"install-os-openstack-openstack-cell1-qj956\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.492133 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-ssh-key\") pod \"install-os-openstack-openstack-cell1-qj956\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.492237 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-inventory\") pod \"install-os-openstack-openstack-cell1-qj956\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.494016 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-ceph\") pod \"install-os-openstack-openstack-cell1-qj956\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.507898 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hdfz\" (UniqueName: \"kubernetes.io/projected/716f9427-4add-453c-9d69-f40949a5ee12-kube-api-access-9hdfz\") pod \"install-os-openstack-openstack-cell1-qj956\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:42 crc kubenswrapper[4941]: I1130 08:39:42.659917 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:39:43 crc kubenswrapper[4941]: I1130 08:39:43.260943 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-qj956"] Nov 30 08:39:44 crc kubenswrapper[4941]: I1130 08:39:44.217680 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-qj956" event={"ID":"716f9427-4add-453c-9d69-f40949a5ee12","Type":"ContainerStarted","Data":"028f908202906dbf2aeb1220e18307c672f2b094f68cb04fa298b57245af3174"} Nov 30 08:39:44 crc kubenswrapper[4941]: I1130 08:39:44.218129 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-qj956" event={"ID":"716f9427-4add-453c-9d69-f40949a5ee12","Type":"ContainerStarted","Data":"b23ddd637b124ac1977d247dded8549230bb33b416b1bd029ff5de6a18197b6d"} Nov 30 08:39:44 crc kubenswrapper[4941]: I1130 08:39:44.252128 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-qj956" podStartSLOduration=1.743167385 podStartE2EDuration="2.252098776s" podCreationTimestamp="2025-11-30 08:39:42 +0000 UTC" firstStartedPulling="2025-11-30 08:39:43.262846208 +0000 UTC m=+6804.031017837" lastFinishedPulling="2025-11-30 08:39:43.771777619 +0000 UTC m=+6804.539949228" observedRunningTime="2025-11-30 08:39:44.235373159 +0000 UTC m=+6805.003544788" watchObservedRunningTime="2025-11-30 08:39:44.252098776 +0000 UTC m=+6805.020270405" Nov 30 08:39:55 crc kubenswrapper[4941]: I1130 08:39:55.521842 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:39:55 crc kubenswrapper[4941]: E1130 08:39:55.523038 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:40:08 crc kubenswrapper[4941]: I1130 08:40:08.522868 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:40:08 crc kubenswrapper[4941]: E1130 08:40:08.525529 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:40:23 crc kubenswrapper[4941]: I1130 08:40:23.522362 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:40:23 crc kubenswrapper[4941]: E1130 08:40:23.523523 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" 
podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:40:35 crc kubenswrapper[4941]: I1130 08:40:35.828643 4941 generic.go:334] "Generic (PLEG): container finished" podID="716f9427-4add-453c-9d69-f40949a5ee12" containerID="028f908202906dbf2aeb1220e18307c672f2b094f68cb04fa298b57245af3174" exitCode=0 Nov 30 08:40:35 crc kubenswrapper[4941]: I1130 08:40:35.828750 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-qj956" event={"ID":"716f9427-4add-453c-9d69-f40949a5ee12","Type":"ContainerDied","Data":"028f908202906dbf2aeb1220e18307c672f2b094f68cb04fa298b57245af3174"} Nov 30 08:40:36 crc kubenswrapper[4941]: I1130 08:40:36.522016 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:40:36 crc kubenswrapper[4941]: E1130 08:40:36.522300 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.418877 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.575288 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hdfz\" (UniqueName: \"kubernetes.io/projected/716f9427-4add-453c-9d69-f40949a5ee12-kube-api-access-9hdfz\") pod \"716f9427-4add-453c-9d69-f40949a5ee12\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.575782 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-ssh-key\") pod \"716f9427-4add-453c-9d69-f40949a5ee12\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.577595 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-ceph\") pod \"716f9427-4add-453c-9d69-f40949a5ee12\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.577930 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-inventory\") pod \"716f9427-4add-453c-9d69-f40949a5ee12\" (UID: \"716f9427-4add-453c-9d69-f40949a5ee12\") " Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.584251 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/716f9427-4add-453c-9d69-f40949a5ee12-kube-api-access-9hdfz" (OuterVolumeSpecName: "kube-api-access-9hdfz") pod "716f9427-4add-453c-9d69-f40949a5ee12" (UID: "716f9427-4add-453c-9d69-f40949a5ee12"). InnerVolumeSpecName "kube-api-access-9hdfz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.584493 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-ceph" (OuterVolumeSpecName: "ceph") pod "716f9427-4add-453c-9d69-f40949a5ee12" (UID: "716f9427-4add-453c-9d69-f40949a5ee12"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.612479 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-inventory" (OuterVolumeSpecName: "inventory") pod "716f9427-4add-453c-9d69-f40949a5ee12" (UID: "716f9427-4add-453c-9d69-f40949a5ee12"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.617667 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "716f9427-4add-453c-9d69-f40949a5ee12" (UID: "716f9427-4add-453c-9d69-f40949a5ee12"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.681139 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.681176 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hdfz\" (UniqueName: \"kubernetes.io/projected/716f9427-4add-453c-9d69-f40949a5ee12-kube-api-access-9hdfz\") on node \"crc\" DevicePath \"\"" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.681189 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.681200 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/716f9427-4add-453c-9d69-f40949a5ee12-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.853833 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-qj956" event={"ID":"716f9427-4add-453c-9d69-f40949a5ee12","Type":"ContainerDied","Data":"b23ddd637b124ac1977d247dded8549230bb33b416b1bd029ff5de6a18197b6d"} Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.853890 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b23ddd637b124ac1977d247dded8549230bb33b416b1bd029ff5de6a18197b6d" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.854007 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-qj956" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.958525 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-5prpd"] Nov 30 08:40:37 crc kubenswrapper[4941]: E1130 08:40:37.959137 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716f9427-4add-453c-9d69-f40949a5ee12" containerName="install-os-openstack-openstack-cell1" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.959162 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="716f9427-4add-453c-9d69-f40949a5ee12" containerName="install-os-openstack-openstack-cell1" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.959487 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="716f9427-4add-453c-9d69-f40949a5ee12" containerName="install-os-openstack-openstack-cell1" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.960513 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.964189 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.964606 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.965090 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.965830 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:40:37 crc kubenswrapper[4941]: I1130 08:40:37.979316 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-5prpd"] Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.092205 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqrqc\" (UniqueName: \"kubernetes.io/projected/12f91628-4c27-4506-9398-777d63c07d0b-kube-api-access-fqrqc\") pod \"configure-os-openstack-openstack-cell1-5prpd\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.092375 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-inventory\") pod \"configure-os-openstack-openstack-cell1-5prpd\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.092457 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-ssh-key\") pod \"configure-os-openstack-openstack-cell1-5prpd\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.092492 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-ceph\") pod \"configure-os-openstack-openstack-cell1-5prpd\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.195367 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqrqc\" (UniqueName: \"kubernetes.io/projected/12f91628-4c27-4506-9398-777d63c07d0b-kube-api-access-fqrqc\") pod \"configure-os-openstack-openstack-cell1-5prpd\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.195466 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-inventory\") pod \"configure-os-openstack-openstack-cell1-5prpd\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.195533 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-ssh-key\") pod \"configure-os-openstack-openstack-cell1-5prpd\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.195555 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-ceph\") pod \"configure-os-openstack-openstack-cell1-5prpd\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.201263 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-ceph\") pod \"configure-os-openstack-openstack-cell1-5prpd\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.201340 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-inventory\") pod \"configure-os-openstack-openstack-cell1-5prpd\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.201290 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-ssh-key\") pod \"configure-os-openstack-openstack-cell1-5prpd\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.223525 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqrqc\" (UniqueName: \"kubernetes.io/projected/12f91628-4c27-4506-9398-777d63c07d0b-kube-api-access-fqrqc\") pod \"configure-os-openstack-openstack-cell1-5prpd\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:38 crc kubenswrapper[4941]: I1130 08:40:38.295174 
4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:40:39 crc kubenswrapper[4941]: I1130 08:40:39.065730 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-5prpd"] Nov 30 08:40:39 crc kubenswrapper[4941]: W1130 08:40:39.072629 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12f91628_4c27_4506_9398_777d63c07d0b.slice/crio-5f59348b6b2a324d2cfc7427aa673814256ba4fd5efba5c1451daa5f4b7637a1 WatchSource:0}: Error finding container 5f59348b6b2a324d2cfc7427aa673814256ba4fd5efba5c1451daa5f4b7637a1: Status 404 returned error can't find the container with id 5f59348b6b2a324d2cfc7427aa673814256ba4fd5efba5c1451daa5f4b7637a1 Nov 30 08:40:39 crc kubenswrapper[4941]: I1130 08:40:39.878354 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-5prpd" event={"ID":"12f91628-4c27-4506-9398-777d63c07d0b","Type":"ContainerStarted","Data":"fb2352e8711f5d29642725f7e6c32c13c3fcc69068400380449e011f91ecdc04"} Nov 30 08:40:39 crc kubenswrapper[4941]: I1130 08:40:39.878939 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-5prpd" event={"ID":"12f91628-4c27-4506-9398-777d63c07d0b","Type":"ContainerStarted","Data":"5f59348b6b2a324d2cfc7427aa673814256ba4fd5efba5c1451daa5f4b7637a1"} Nov 30 08:40:39 crc kubenswrapper[4941]: I1130 08:40:39.906973 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-5prpd" podStartSLOduration=2.465760465 podStartE2EDuration="2.906953354s" podCreationTimestamp="2025-11-30 08:40:37 +0000 UTC" firstStartedPulling="2025-11-30 08:40:39.074861813 +0000 UTC m=+6859.843033422" lastFinishedPulling="2025-11-30 08:40:39.516054702 +0000 UTC m=+6860.284226311" observedRunningTime="2025-11-30 08:40:39.900584907 +0000 UTC m=+6860.668756536" watchObservedRunningTime="2025-11-30 08:40:39.906953354 +0000 UTC m=+6860.675124963" Nov 30 08:40:50 crc kubenswrapper[4941]: I1130 08:40:50.522925 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:40:50 crc kubenswrapper[4941]: E1130 08:40:50.524183 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:40:56 crc kubenswrapper[4941]: I1130 08:40:56.736608 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/openstack-operator-index-dfm6s" podUID="153a8f3b-e179-4d4c-9d5b-14bc3efdc8ea" containerName="registry-server" probeResult="failure" output=< Nov 30 08:40:56 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s Nov 30 08:40:56 crc kubenswrapper[4941]: > Nov 30 08:41:01 crc kubenswrapper[4941]: I1130 08:41:01.521816 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:41:01 crc kubenswrapper[4941]: E1130 08:41:01.522950 4941 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:41:16 crc kubenswrapper[4941]: I1130 08:41:16.521986 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:41:16 crc kubenswrapper[4941]: E1130 08:41:16.523515 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:41:29 crc kubenswrapper[4941]: I1130 08:41:29.522958 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:41:29 crc kubenswrapper[4941]: E1130 08:41:29.524200 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:41:30 crc kubenswrapper[4941]: I1130 08:41:30.017957 4941 generic.go:334] "Generic (PLEG): container finished" podID="12f91628-4c27-4506-9398-777d63c07d0b" containerID="fb2352e8711f5d29642725f7e6c32c13c3fcc69068400380449e011f91ecdc04" exitCode=0 Nov 30 08:41:30 crc kubenswrapper[4941]: I1130 08:41:30.018041 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-5prpd" event={"ID":"12f91628-4c27-4506-9398-777d63c07d0b","Type":"ContainerDied","Data":"fb2352e8711f5d29642725f7e6c32c13c3fcc69068400380449e011f91ecdc04"} Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.688528 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.718498 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqrqc\" (UniqueName: \"kubernetes.io/projected/12f91628-4c27-4506-9398-777d63c07d0b-kube-api-access-fqrqc\") pod \"12f91628-4c27-4506-9398-777d63c07d0b\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.719103 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-ceph\") pod \"12f91628-4c27-4506-9398-777d63c07d0b\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.719309 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-ssh-key\") pod \"12f91628-4c27-4506-9398-777d63c07d0b\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.719404 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-inventory\") pod \"12f91628-4c27-4506-9398-777d63c07d0b\" (UID: \"12f91628-4c27-4506-9398-777d63c07d0b\") " Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.763452 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-ceph" (OuterVolumeSpecName: "ceph") pod "12f91628-4c27-4506-9398-777d63c07d0b" (UID: "12f91628-4c27-4506-9398-777d63c07d0b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.763491 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12f91628-4c27-4506-9398-777d63c07d0b-kube-api-access-fqrqc" (OuterVolumeSpecName: "kube-api-access-fqrqc") pod "12f91628-4c27-4506-9398-777d63c07d0b" (UID: "12f91628-4c27-4506-9398-777d63c07d0b"). InnerVolumeSpecName "kube-api-access-fqrqc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.772682 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-inventory" (OuterVolumeSpecName: "inventory") pod "12f91628-4c27-4506-9398-777d63c07d0b" (UID: "12f91628-4c27-4506-9398-777d63c07d0b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.799664 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "12f91628-4c27-4506-9398-777d63c07d0b" (UID: "12f91628-4c27-4506-9398-777d63c07d0b"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.822434 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqrqc\" (UniqueName: \"kubernetes.io/projected/12f91628-4c27-4506-9398-777d63c07d0b-kube-api-access-fqrqc\") on node \"crc\" DevicePath \"\"" Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.822547 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.822561 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:41:31 crc kubenswrapper[4941]: I1130 08:41:31.822573 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/12f91628-4c27-4506-9398-777d63c07d0b-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.044484 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-5prpd" event={"ID":"12f91628-4c27-4506-9398-777d63c07d0b","Type":"ContainerDied","Data":"5f59348b6b2a324d2cfc7427aa673814256ba4fd5efba5c1451daa5f4b7637a1"} Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.044540 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f59348b6b2a324d2cfc7427aa673814256ba4fd5efba5c1451daa5f4b7637a1" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.044647 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-5prpd" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.195835 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-58kvt"] Nov 30 08:41:32 crc kubenswrapper[4941]: E1130 08:41:32.196668 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12f91628-4c27-4506-9398-777d63c07d0b" containerName="configure-os-openstack-openstack-cell1" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.196790 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="12f91628-4c27-4506-9398-777d63c07d0b" containerName="configure-os-openstack-openstack-cell1" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.197185 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="12f91628-4c27-4506-9398-777d63c07d0b" containerName="configure-os-openstack-openstack-cell1" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.198372 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.201775 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.201896 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.202175 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.207821 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.216637 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-58kvt"] Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.234350 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsj5g\" (UniqueName: \"kubernetes.io/projected/cda00e1a-68e9-45f3-85f8-840fc4614400-kube-api-access-lsj5g\") pod \"ssh-known-hosts-openstack-58kvt\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.234449 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-ceph\") pod \"ssh-known-hosts-openstack-58kvt\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.234527 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-58kvt\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.234557 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-inventory-0\") pod \"ssh-known-hosts-openstack-58kvt\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.337061 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsj5g\" (UniqueName: \"kubernetes.io/projected/cda00e1a-68e9-45f3-85f8-840fc4614400-kube-api-access-lsj5g\") pod \"ssh-known-hosts-openstack-58kvt\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.337553 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-ceph\") pod \"ssh-known-hosts-openstack-58kvt\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.337710 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: 
\"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-58kvt\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.337825 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-inventory-0\") pod \"ssh-known-hosts-openstack-58kvt\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.343715 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-58kvt\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.344175 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-inventory-0\") pod \"ssh-known-hosts-openstack-58kvt\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.344743 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-ceph\") pod \"ssh-known-hosts-openstack-58kvt\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.354744 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsj5g\" (UniqueName: \"kubernetes.io/projected/cda00e1a-68e9-45f3-85f8-840fc4614400-kube-api-access-lsj5g\") pod \"ssh-known-hosts-openstack-58kvt\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:32 crc kubenswrapper[4941]: I1130 08:41:32.533044 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:33 crc kubenswrapper[4941]: I1130 08:41:33.138492 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-58kvt"] Nov 30 08:41:34 crc kubenswrapper[4941]: I1130 08:41:34.074478 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-58kvt" event={"ID":"cda00e1a-68e9-45f3-85f8-840fc4614400","Type":"ContainerStarted","Data":"8b17875a705690456c4201d2c662ae39dc6b37bec1c735bef3a22d1926f57d35"} Nov 30 08:41:34 crc kubenswrapper[4941]: I1130 08:41:34.075464 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-58kvt" event={"ID":"cda00e1a-68e9-45f3-85f8-840fc4614400","Type":"ContainerStarted","Data":"6d17ad393bf946724c8fd663d2c014b4946cd8c6e5a71c9c288fc91c54bb0343"} Nov 30 08:41:34 crc kubenswrapper[4941]: I1130 08:41:34.094903 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-openstack-58kvt" podStartSLOduration=1.624951104 podStartE2EDuration="2.09487876s" podCreationTimestamp="2025-11-30 08:41:32 +0000 UTC" firstStartedPulling="2025-11-30 08:41:33.154139171 +0000 UTC m=+6913.922310780" lastFinishedPulling="2025-11-30 08:41:33.624066827 +0000 UTC m=+6914.392238436" observedRunningTime="2025-11-30 08:41:34.094209499 +0000 UTC m=+6914.862381138" watchObservedRunningTime="2025-11-30 08:41:34.09487876 +0000 UTC m=+6914.863050369" Nov 30 08:41:40 crc kubenswrapper[4941]: I1130 08:41:40.523513 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:41:40 crc kubenswrapper[4941]: E1130 08:41:40.527715 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:41:43 crc kubenswrapper[4941]: I1130 08:41:43.195031 4941 generic.go:334] "Generic (PLEG): container finished" podID="cda00e1a-68e9-45f3-85f8-840fc4614400" containerID="8b17875a705690456c4201d2c662ae39dc6b37bec1c735bef3a22d1926f57d35" exitCode=0 Nov 30 08:41:43 crc kubenswrapper[4941]: I1130 08:41:43.195137 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-58kvt" event={"ID":"cda00e1a-68e9-45f3-85f8-840fc4614400","Type":"ContainerDied","Data":"8b17875a705690456c4201d2c662ae39dc6b37bec1c735bef3a22d1926f57d35"} Nov 30 08:41:44 crc kubenswrapper[4941]: I1130 08:41:44.871203 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.025013 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsj5g\" (UniqueName: \"kubernetes.io/projected/cda00e1a-68e9-45f3-85f8-840fc4614400-kube-api-access-lsj5g\") pod \"cda00e1a-68e9-45f3-85f8-840fc4614400\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.025392 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-ssh-key-openstack-cell1\") pod \"cda00e1a-68e9-45f3-85f8-840fc4614400\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.025478 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-inventory-0\") pod \"cda00e1a-68e9-45f3-85f8-840fc4614400\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.025549 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-ceph\") pod \"cda00e1a-68e9-45f3-85f8-840fc4614400\" (UID: \"cda00e1a-68e9-45f3-85f8-840fc4614400\") " Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.034296 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-ceph" (OuterVolumeSpecName: "ceph") pod "cda00e1a-68e9-45f3-85f8-840fc4614400" (UID: "cda00e1a-68e9-45f3-85f8-840fc4614400"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.034633 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cda00e1a-68e9-45f3-85f8-840fc4614400-kube-api-access-lsj5g" (OuterVolumeSpecName: "kube-api-access-lsj5g") pod "cda00e1a-68e9-45f3-85f8-840fc4614400" (UID: "cda00e1a-68e9-45f3-85f8-840fc4614400"). InnerVolumeSpecName "kube-api-access-lsj5g". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.059394 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "cda00e1a-68e9-45f3-85f8-840fc4614400" (UID: "cda00e1a-68e9-45f3-85f8-840fc4614400"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.083431 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "cda00e1a-68e9-45f3-85f8-840fc4614400" (UID: "cda00e1a-68e9-45f3-85f8-840fc4614400"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.131963 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.132420 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsj5g\" (UniqueName: \"kubernetes.io/projected/cda00e1a-68e9-45f3-85f8-840fc4614400-kube-api-access-lsj5g\") on node \"crc\" DevicePath \"\"" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.132517 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.132760 4941 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/cda00e1a-68e9-45f3-85f8-840fc4614400-inventory-0\") on node \"crc\" DevicePath \"\"" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.229861 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-58kvt" event={"ID":"cda00e1a-68e9-45f3-85f8-840fc4614400","Type":"ContainerDied","Data":"6d17ad393bf946724c8fd663d2c014b4946cd8c6e5a71c9c288fc91c54bb0343"} Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.229909 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d17ad393bf946724c8fd663d2c014b4946cd8c6e5a71c9c288fc91c54bb0343" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.229979 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-58kvt" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.331123 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-4s4c4"] Nov 30 08:41:45 crc kubenswrapper[4941]: E1130 08:41:45.331749 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cda00e1a-68e9-45f3-85f8-840fc4614400" containerName="ssh-known-hosts-openstack" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.331775 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="cda00e1a-68e9-45f3-85f8-840fc4614400" containerName="ssh-known-hosts-openstack" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.332049 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="cda00e1a-68e9-45f3-85f8-840fc4614400" containerName="ssh-known-hosts-openstack" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.333100 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.336471 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.337405 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.338414 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.338740 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.359599 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-4s4c4"] Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.438870 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-ceph\") pod \"run-os-openstack-openstack-cell1-4s4c4\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.438945 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-ssh-key\") pod \"run-os-openstack-openstack-cell1-4s4c4\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.439099 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmrml\" (UniqueName: \"kubernetes.io/projected/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-kube-api-access-qmrml\") pod \"run-os-openstack-openstack-cell1-4s4c4\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.439338 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-inventory\") pod \"run-os-openstack-openstack-cell1-4s4c4\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.542379 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-inventory\") pod \"run-os-openstack-openstack-cell1-4s4c4\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.542638 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-ceph\") pod \"run-os-openstack-openstack-cell1-4s4c4\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.542676 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-ssh-key\") pod \"run-os-openstack-openstack-cell1-4s4c4\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.542697 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmrml\" (UniqueName: \"kubernetes.io/projected/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-kube-api-access-qmrml\") pod \"run-os-openstack-openstack-cell1-4s4c4\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.550106 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-ssh-key\") pod \"run-os-openstack-openstack-cell1-4s4c4\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.550215 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-ceph\") pod \"run-os-openstack-openstack-cell1-4s4c4\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.550588 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-inventory\") pod \"run-os-openstack-openstack-cell1-4s4c4\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.560544 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmrml\" (UniqueName: \"kubernetes.io/projected/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-kube-api-access-qmrml\") pod \"run-os-openstack-openstack-cell1-4s4c4\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:45 crc kubenswrapper[4941]: I1130 08:41:45.651871 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:46 crc kubenswrapper[4941]: I1130 08:41:46.243651 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-4s4c4"] Nov 30 08:41:47 crc kubenswrapper[4941]: I1130 08:41:47.272719 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-4s4c4" event={"ID":"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e","Type":"ContainerStarted","Data":"d3245fb90514003cc32227ccd0dadde9d1293fa140dc1d26df7f5e1b12d202b9"} Nov 30 08:41:48 crc kubenswrapper[4941]: I1130 08:41:48.287258 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-4s4c4" event={"ID":"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e","Type":"ContainerStarted","Data":"8ab4a5c772ccde47b090649830353c82b2146f159049316c99c7d3029463b403"} Nov 30 08:41:48 crc kubenswrapper[4941]: I1130 08:41:48.318942 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-4s4c4" podStartSLOduration=2.409197773 podStartE2EDuration="3.318918843s" podCreationTimestamp="2025-11-30 08:41:45 +0000 UTC" firstStartedPulling="2025-11-30 08:41:46.246527645 +0000 UTC m=+6927.014699244" lastFinishedPulling="2025-11-30 08:41:47.156248705 +0000 UTC m=+6927.924420314" observedRunningTime="2025-11-30 08:41:48.303586759 +0000 UTC m=+6929.071758368" watchObservedRunningTime="2025-11-30 08:41:48.318918843 +0000 UTC m=+6929.087090452" Nov 30 08:41:53 crc kubenswrapper[4941]: I1130 08:41:53.522379 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:41:53 crc kubenswrapper[4941]: E1130 08:41:53.523627 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:41:55 crc kubenswrapper[4941]: I1130 08:41:55.374666 4941 generic.go:334] "Generic (PLEG): container finished" podID="e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e" containerID="8ab4a5c772ccde47b090649830353c82b2146f159049316c99c7d3029463b403" exitCode=0 Nov 30 08:41:55 crc kubenswrapper[4941]: I1130 08:41:55.374746 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-4s4c4" event={"ID":"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e","Type":"ContainerDied","Data":"8ab4a5c772ccde47b090649830353c82b2146f159049316c99c7d3029463b403"} Nov 30 08:41:56 crc kubenswrapper[4941]: I1130 08:41:56.917987 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.094697 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-ssh-key\") pod \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.095161 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-ceph\") pod \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.095242 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-inventory\") pod \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.095534 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmrml\" (UniqueName: \"kubernetes.io/projected/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-kube-api-access-qmrml\") pod \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\" (UID: \"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e\") " Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.100681 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-kube-api-access-qmrml" (OuterVolumeSpecName: "kube-api-access-qmrml") pod "e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e" (UID: "e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e"). InnerVolumeSpecName "kube-api-access-qmrml". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.101937 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-ceph" (OuterVolumeSpecName: "ceph") pod "e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e" (UID: "e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.124616 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e" (UID: "e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.129268 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-inventory" (OuterVolumeSpecName: "inventory") pod "e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e" (UID: "e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.198091 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmrml\" (UniqueName: \"kubernetes.io/projected/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-kube-api-access-qmrml\") on node \"crc\" DevicePath \"\"" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.198130 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.198140 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.198148 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.408353 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-4s4c4" event={"ID":"e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e","Type":"ContainerDied","Data":"d3245fb90514003cc32227ccd0dadde9d1293fa140dc1d26df7f5e1b12d202b9"} Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.408729 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3245fb90514003cc32227ccd0dadde9d1293fa140dc1d26df7f5e1b12d202b9" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.409022 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-4s4c4" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.477845 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-gncsn"] Nov 30 08:41:57 crc kubenswrapper[4941]: E1130 08:41:57.478286 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e" containerName="run-os-openstack-openstack-cell1" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.478303 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e" containerName="run-os-openstack-openstack-cell1" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.478563 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e" containerName="run-os-openstack-openstack-cell1" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.479372 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.481567 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.482676 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.488960 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.489193 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.495031 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-gncsn"] Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.610862 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjbgd\" (UniqueName: \"kubernetes.io/projected/43bbf907-a7fd-45c7-ac8d-45259069d8ca-kube-api-access-wjbgd\") pod \"reboot-os-openstack-openstack-cell1-gncsn\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.611305 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-inventory\") pod \"reboot-os-openstack-openstack-cell1-gncsn\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.611367 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-ceph\") pod \"reboot-os-openstack-openstack-cell1-gncsn\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.611402 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-gncsn\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.713964 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-inventory\") pod \"reboot-os-openstack-openstack-cell1-gncsn\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.714314 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-ceph\") pod \"reboot-os-openstack-openstack-cell1-gncsn\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.714825 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-gncsn\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.715174 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjbgd\" (UniqueName: \"kubernetes.io/projected/43bbf907-a7fd-45c7-ac8d-45259069d8ca-kube-api-access-wjbgd\") pod \"reboot-os-openstack-openstack-cell1-gncsn\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.719240 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-gncsn\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.720915 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-ceph\") pod \"reboot-os-openstack-openstack-cell1-gncsn\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.720924 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-inventory\") pod \"reboot-os-openstack-openstack-cell1-gncsn\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.733801 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjbgd\" (UniqueName: \"kubernetes.io/projected/43bbf907-a7fd-45c7-ac8d-45259069d8ca-kube-api-access-wjbgd\") pod \"reboot-os-openstack-openstack-cell1-gncsn\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:57 crc kubenswrapper[4941]: I1130 08:41:57.807294 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:41:58 crc kubenswrapper[4941]: I1130 08:41:58.365192 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-gncsn"] Nov 30 08:41:58 crc kubenswrapper[4941]: I1130 08:41:58.419496 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" event={"ID":"43bbf907-a7fd-45c7-ac8d-45259069d8ca","Type":"ContainerStarted","Data":"1fd8ce93b90eb93055ec4b6eb641b8388d20bd5926e8cf9b6ef80cb9ad9f0b1c"} Nov 30 08:41:59 crc kubenswrapper[4941]: I1130 08:41:59.433307 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" event={"ID":"43bbf907-a7fd-45c7-ac8d-45259069d8ca","Type":"ContainerStarted","Data":"23acf00e77111b71814da043e716bbc86e2083ac8542ab117026038c26049804"} Nov 30 08:41:59 crc kubenswrapper[4941]: I1130 08:41:59.479586 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" podStartSLOduration=1.8380213140000001 podStartE2EDuration="2.479558624s" podCreationTimestamp="2025-11-30 08:41:57 +0000 UTC" firstStartedPulling="2025-11-30 08:41:58.373056261 +0000 UTC m=+6939.141227880" lastFinishedPulling="2025-11-30 08:41:59.014593581 +0000 UTC m=+6939.782765190" observedRunningTime="2025-11-30 08:41:59.452526507 +0000 UTC m=+6940.220698136" watchObservedRunningTime="2025-11-30 08:41:59.479558624 +0000 UTC m=+6940.247730243" Nov 30 08:42:06 crc kubenswrapper[4941]: I1130 08:42:06.523033 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:42:06 crc kubenswrapper[4941]: E1130 08:42:06.524663 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.334304 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-r2pbx"] Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.337632 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.372949 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r2pbx"] Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.451936 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmq4n\" (UniqueName: \"kubernetes.io/projected/fca500c2-a950-469c-9cfd-9c5ad51b0e52-kube-api-access-pmq4n\") pod \"community-operators-r2pbx\" (UID: \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\") " pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.452025 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca500c2-a950-469c-9cfd-9c5ad51b0e52-catalog-content\") pod \"community-operators-r2pbx\" (UID: \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\") " pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.452066 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca500c2-a950-469c-9cfd-9c5ad51b0e52-utilities\") pod \"community-operators-r2pbx\" (UID: \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\") " pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.553776 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca500c2-a950-469c-9cfd-9c5ad51b0e52-catalog-content\") pod \"community-operators-r2pbx\" (UID: \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\") " pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.553864 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca500c2-a950-469c-9cfd-9c5ad51b0e52-utilities\") pod \"community-operators-r2pbx\" (UID: \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\") " pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.554107 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmq4n\" (UniqueName: \"kubernetes.io/projected/fca500c2-a950-469c-9cfd-9c5ad51b0e52-kube-api-access-pmq4n\") pod \"community-operators-r2pbx\" (UID: \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\") " pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.554554 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca500c2-a950-469c-9cfd-9c5ad51b0e52-catalog-content\") pod \"community-operators-r2pbx\" (UID: \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\") " pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.554660 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca500c2-a950-469c-9cfd-9c5ad51b0e52-utilities\") pod \"community-operators-r2pbx\" (UID: \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\") " pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.598648 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pmq4n\" (UniqueName: \"kubernetes.io/projected/fca500c2-a950-469c-9cfd-9c5ad51b0e52-kube-api-access-pmq4n\") pod \"community-operators-r2pbx\" (UID: \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\") " pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:10 crc kubenswrapper[4941]: I1130 08:42:10.667431 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:11 crc kubenswrapper[4941]: I1130 08:42:11.282873 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r2pbx"] Nov 30 08:42:11 crc kubenswrapper[4941]: I1130 08:42:11.581371 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2pbx" event={"ID":"fca500c2-a950-469c-9cfd-9c5ad51b0e52","Type":"ContainerStarted","Data":"61fd09929a7bc63b2fce3b88f2503379d054444bf6cb18353a5a1f60fee8a4b2"} Nov 30 08:42:11 crc kubenswrapper[4941]: I1130 08:42:11.581871 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2pbx" event={"ID":"fca500c2-a950-469c-9cfd-9c5ad51b0e52","Type":"ContainerStarted","Data":"a648ecd8018bab05553d613ff67a4a6e0151520ed7de4d1d28ced5c884dd664c"} Nov 30 08:42:12 crc kubenswrapper[4941]: I1130 08:42:12.593043 4941 generic.go:334] "Generic (PLEG): container finished" podID="fca500c2-a950-469c-9cfd-9c5ad51b0e52" containerID="61fd09929a7bc63b2fce3b88f2503379d054444bf6cb18353a5a1f60fee8a4b2" exitCode=0 Nov 30 08:42:12 crc kubenswrapper[4941]: I1130 08:42:12.593130 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2pbx" event={"ID":"fca500c2-a950-469c-9cfd-9c5ad51b0e52","Type":"ContainerDied","Data":"61fd09929a7bc63b2fce3b88f2503379d054444bf6cb18353a5a1f60fee8a4b2"} Nov 30 08:42:14 crc kubenswrapper[4941]: I1130 08:42:14.652026 4941 generic.go:334] "Generic (PLEG): container finished" podID="fca500c2-a950-469c-9cfd-9c5ad51b0e52" containerID="d9d784b3ae96e003d4f029881d99b5a0effaacf615fe63e1018b63b42abce79a" exitCode=0 Nov 30 08:42:14 crc kubenswrapper[4941]: I1130 08:42:14.652093 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2pbx" event={"ID":"fca500c2-a950-469c-9cfd-9c5ad51b0e52","Type":"ContainerDied","Data":"d9d784b3ae96e003d4f029881d99b5a0effaacf615fe63e1018b63b42abce79a"} Nov 30 08:42:15 crc kubenswrapper[4941]: I1130 08:42:15.672903 4941 generic.go:334] "Generic (PLEG): container finished" podID="43bbf907-a7fd-45c7-ac8d-45259069d8ca" containerID="23acf00e77111b71814da043e716bbc86e2083ac8542ab117026038c26049804" exitCode=0 Nov 30 08:42:15 crc kubenswrapper[4941]: I1130 08:42:15.673018 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" event={"ID":"43bbf907-a7fd-45c7-ac8d-45259069d8ca","Type":"ContainerDied","Data":"23acf00e77111b71814da043e716bbc86e2083ac8542ab117026038c26049804"} Nov 30 08:42:16 crc kubenswrapper[4941]: I1130 08:42:16.687495 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2pbx" event={"ID":"fca500c2-a950-469c-9cfd-9c5ad51b0e52","Type":"ContainerStarted","Data":"242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216"} Nov 30 08:42:16 crc kubenswrapper[4941]: I1130 08:42:16.721901 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-r2pbx" podStartSLOduration=3.413603741 podStartE2EDuration="6.721880193s" podCreationTimestamp="2025-11-30 08:42:10 +0000 UTC" firstStartedPulling="2025-11-30 08:42:12.596485614 +0000 UTC m=+6953.364657223" lastFinishedPulling="2025-11-30 08:42:15.904762066 +0000 UTC m=+6956.672933675" observedRunningTime="2025-11-30 08:42:16.713539235 +0000 UTC m=+6957.481710874" watchObservedRunningTime="2025-11-30 08:42:16.721880193 +0000 UTC m=+6957.490051802" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.207378 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.330870 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-inventory\") pod \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.330976 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-ceph\") pod \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.331024 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-ssh-key\") pod \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.331137 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjbgd\" (UniqueName: \"kubernetes.io/projected/43bbf907-a7fd-45c7-ac8d-45259069d8ca-kube-api-access-wjbgd\") pod \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\" (UID: \"43bbf907-a7fd-45c7-ac8d-45259069d8ca\") " Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.340010 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43bbf907-a7fd-45c7-ac8d-45259069d8ca-kube-api-access-wjbgd" (OuterVolumeSpecName: "kube-api-access-wjbgd") pod "43bbf907-a7fd-45c7-ac8d-45259069d8ca" (UID: "43bbf907-a7fd-45c7-ac8d-45259069d8ca"). InnerVolumeSpecName "kube-api-access-wjbgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.349663 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-ceph" (OuterVolumeSpecName: "ceph") pod "43bbf907-a7fd-45c7-ac8d-45259069d8ca" (UID: "43bbf907-a7fd-45c7-ac8d-45259069d8ca"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.365103 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-inventory" (OuterVolumeSpecName: "inventory") pod "43bbf907-a7fd-45c7-ac8d-45259069d8ca" (UID: "43bbf907-a7fd-45c7-ac8d-45259069d8ca"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.386312 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "43bbf907-a7fd-45c7-ac8d-45259069d8ca" (UID: "43bbf907-a7fd-45c7-ac8d-45259069d8ca"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.433976 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.434165 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjbgd\" (UniqueName: \"kubernetes.io/projected/43bbf907-a7fd-45c7-ac8d-45259069d8ca-kube-api-access-wjbgd\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.434227 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.434316 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/43bbf907-a7fd-45c7-ac8d-45259069d8ca-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.702596 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.702583 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-gncsn" event={"ID":"43bbf907-a7fd-45c7-ac8d-45259069d8ca","Type":"ContainerDied","Data":"1fd8ce93b90eb93055ec4b6eb641b8388d20bd5926e8cf9b6ef80cb9ad9f0b1c"} Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.703061 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1fd8ce93b90eb93055ec4b6eb641b8388d20bd5926e8cf9b6ef80cb9ad9f0b1c" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.813571 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-ktf6t"] Nov 30 08:42:17 crc kubenswrapper[4941]: E1130 08:42:17.815221 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43bbf907-a7fd-45c7-ac8d-45259069d8ca" containerName="reboot-os-openstack-openstack-cell1" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.815246 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="43bbf907-a7fd-45c7-ac8d-45259069d8ca" containerName="reboot-os-openstack-openstack-cell1" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.815710 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="43bbf907-a7fd-45c7-ac8d-45259069d8ca" containerName="reboot-os-openstack-openstack-cell1" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.817297 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.819804 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.820058 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.820175 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.821172 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.833447 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-ktf6t"] Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.947957 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-inventory\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.948025 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.948069 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ceph\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.948160 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.948218 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.948399 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ssh-key\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: 
\"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.948505 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.948636 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl568\" (UniqueName: \"kubernetes.io/projected/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-kube-api-access-jl568\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.948809 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.949113 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.949166 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:17 crc kubenswrapper[4941]: I1130 08:42:17.949189 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.052077 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-inventory\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.052141 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.052179 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ceph\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.052216 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.052256 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.052282 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ssh-key\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.052301 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.052351 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl568\" (UniqueName: \"kubernetes.io/projected/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-kube-api-access-jl568\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.052394 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.052455 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.052477 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.052497 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.058456 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.058806 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.060262 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ssh-key\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.061003 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.061141 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.061760 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.062299 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-inventory\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.064406 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ceph\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.066431 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.066980 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.071039 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl568\" (UniqueName: \"kubernetes.io/projected/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-kube-api-access-jl568\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.073669 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-ktf6t\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.136499 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.521694 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:42:18 crc kubenswrapper[4941]: E1130 08:42:18.522483 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:42:18 crc kubenswrapper[4941]: W1130 08:42:18.761142 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0a57fdf_dbf3_4dec_8079_ecfffe844fa3.slice/crio-2f151c6cfa1cffc78cbf47f954587ebe950d9d7cfd680651e49673e8cc772a31 WatchSource:0}: Error finding container 2f151c6cfa1cffc78cbf47f954587ebe950d9d7cfd680651e49673e8cc772a31: Status 404 returned error can't find the container with id 2f151c6cfa1cffc78cbf47f954587ebe950d9d7cfd680651e49673e8cc772a31 Nov 30 08:42:18 crc kubenswrapper[4941]: I1130 08:42:18.762066 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-ktf6t"] Nov 30 08:42:19 crc kubenswrapper[4941]: I1130 08:42:19.583194 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:42:19 crc kubenswrapper[4941]: I1130 08:42:19.729038 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" event={"ID":"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3","Type":"ContainerStarted","Data":"2f151c6cfa1cffc78cbf47f954587ebe950d9d7cfd680651e49673e8cc772a31"} Nov 30 08:42:20 crc kubenswrapper[4941]: I1130 08:42:20.668642 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:20 crc kubenswrapper[4941]: I1130 08:42:20.669101 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:20 crc kubenswrapper[4941]: I1130 08:42:20.749664 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:20 crc kubenswrapper[4941]: I1130 08:42:20.769982 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" event={"ID":"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3","Type":"ContainerStarted","Data":"77dd2b3be86793b628581645a0b8360b4b9d95d710819bde1e6795f42120adda"} Nov 30 08:42:20 crc kubenswrapper[4941]: I1130 08:42:20.828400 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" podStartSLOduration=3.012976152 podStartE2EDuration="3.828373737s" podCreationTimestamp="2025-11-30 08:42:17 +0000 UTC" firstStartedPulling="2025-11-30 08:42:18.764015166 +0000 UTC m=+6959.532186775" lastFinishedPulling="2025-11-30 08:42:19.579412751 +0000 UTC m=+6960.347584360" observedRunningTime="2025-11-30 08:42:20.814855779 +0000 UTC m=+6961.583027388" watchObservedRunningTime="2025-11-30 08:42:20.828373737 +0000 UTC m=+6961.596545356" Nov 30 08:42:30 crc 
kubenswrapper[4941]: I1130 08:42:30.722626 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:30 crc kubenswrapper[4941]: I1130 08:42:30.782460 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r2pbx"] Nov 30 08:42:30 crc kubenswrapper[4941]: I1130 08:42:30.891842 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-r2pbx" podUID="fca500c2-a950-469c-9cfd-9c5ad51b0e52" containerName="registry-server" containerID="cri-o://242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216" gracePeriod=2 Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.503266 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.522403 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:42:31 crc kubenswrapper[4941]: E1130 08:42:31.522917 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.693196 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmq4n\" (UniqueName: \"kubernetes.io/projected/fca500c2-a950-469c-9cfd-9c5ad51b0e52-kube-api-access-pmq4n\") pod \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\" (UID: \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\") " Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.693668 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca500c2-a950-469c-9cfd-9c5ad51b0e52-catalog-content\") pod \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\" (UID: \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\") " Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.693752 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca500c2-a950-469c-9cfd-9c5ad51b0e52-utilities\") pod \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\" (UID: \"fca500c2-a950-469c-9cfd-9c5ad51b0e52\") " Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.695908 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fca500c2-a950-469c-9cfd-9c5ad51b0e52-utilities" (OuterVolumeSpecName: "utilities") pod "fca500c2-a950-469c-9cfd-9c5ad51b0e52" (UID: "fca500c2-a950-469c-9cfd-9c5ad51b0e52"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.700521 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fca500c2-a950-469c-9cfd-9c5ad51b0e52-kube-api-access-pmq4n" (OuterVolumeSpecName: "kube-api-access-pmq4n") pod "fca500c2-a950-469c-9cfd-9c5ad51b0e52" (UID: "fca500c2-a950-469c-9cfd-9c5ad51b0e52"). InnerVolumeSpecName "kube-api-access-pmq4n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.744113 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fca500c2-a950-469c-9cfd-9c5ad51b0e52-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fca500c2-a950-469c-9cfd-9c5ad51b0e52" (UID: "fca500c2-a950-469c-9cfd-9c5ad51b0e52"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.796942 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmq4n\" (UniqueName: \"kubernetes.io/projected/fca500c2-a950-469c-9cfd-9c5ad51b0e52-kube-api-access-pmq4n\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.796990 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca500c2-a950-469c-9cfd-9c5ad51b0e52-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.797003 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca500c2-a950-469c-9cfd-9c5ad51b0e52-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.908777 4941 generic.go:334] "Generic (PLEG): container finished" podID="fca500c2-a950-469c-9cfd-9c5ad51b0e52" containerID="242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216" exitCode=0 Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.908825 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2pbx" event={"ID":"fca500c2-a950-469c-9cfd-9c5ad51b0e52","Type":"ContainerDied","Data":"242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216"} Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.908858 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r2pbx" event={"ID":"fca500c2-a950-469c-9cfd-9c5ad51b0e52","Type":"ContainerDied","Data":"a648ecd8018bab05553d613ff67a4a6e0151520ed7de4d1d28ced5c884dd664c"} Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.908865 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-r2pbx" Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.908876 4941 scope.go:117] "RemoveContainer" containerID="242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216" Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.939859 4941 scope.go:117] "RemoveContainer" containerID="d9d784b3ae96e003d4f029881d99b5a0effaacf615fe63e1018b63b42abce79a" Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.963997 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r2pbx"] Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.981249 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-r2pbx"] Nov 30 08:42:31 crc kubenswrapper[4941]: I1130 08:42:31.998707 4941 scope.go:117] "RemoveContainer" containerID="61fd09929a7bc63b2fce3b88f2503379d054444bf6cb18353a5a1f60fee8a4b2" Nov 30 08:42:32 crc kubenswrapper[4941]: I1130 08:42:32.054901 4941 scope.go:117] "RemoveContainer" containerID="242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216" Nov 30 08:42:32 crc kubenswrapper[4941]: E1130 08:42:32.055622 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216\": container with ID starting with 242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216 not found: ID does not exist" containerID="242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216" Nov 30 08:42:32 crc kubenswrapper[4941]: I1130 08:42:32.055678 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216"} err="failed to get container status \"242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216\": rpc error: code = NotFound desc = could not find container \"242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216\": container with ID starting with 242e3cd5e261ebac909da28a7402a3fbb2d68a0cb43d11a8da7b0406c153e216 not found: ID does not exist" Nov 30 08:42:32 crc kubenswrapper[4941]: I1130 08:42:32.055714 4941 scope.go:117] "RemoveContainer" containerID="d9d784b3ae96e003d4f029881d99b5a0effaacf615fe63e1018b63b42abce79a" Nov 30 08:42:32 crc kubenswrapper[4941]: E1130 08:42:32.056240 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9d784b3ae96e003d4f029881d99b5a0effaacf615fe63e1018b63b42abce79a\": container with ID starting with d9d784b3ae96e003d4f029881d99b5a0effaacf615fe63e1018b63b42abce79a not found: ID does not exist" containerID="d9d784b3ae96e003d4f029881d99b5a0effaacf615fe63e1018b63b42abce79a" Nov 30 08:42:32 crc kubenswrapper[4941]: I1130 08:42:32.056323 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9d784b3ae96e003d4f029881d99b5a0effaacf615fe63e1018b63b42abce79a"} err="failed to get container status \"d9d784b3ae96e003d4f029881d99b5a0effaacf615fe63e1018b63b42abce79a\": rpc error: code = NotFound desc = could not find container \"d9d784b3ae96e003d4f029881d99b5a0effaacf615fe63e1018b63b42abce79a\": container with ID starting with d9d784b3ae96e003d4f029881d99b5a0effaacf615fe63e1018b63b42abce79a not found: ID does not exist" Nov 30 08:42:32 crc kubenswrapper[4941]: I1130 08:42:32.056403 4941 scope.go:117] "RemoveContainer" 
containerID="61fd09929a7bc63b2fce3b88f2503379d054444bf6cb18353a5a1f60fee8a4b2" Nov 30 08:42:32 crc kubenswrapper[4941]: E1130 08:42:32.056956 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61fd09929a7bc63b2fce3b88f2503379d054444bf6cb18353a5a1f60fee8a4b2\": container with ID starting with 61fd09929a7bc63b2fce3b88f2503379d054444bf6cb18353a5a1f60fee8a4b2 not found: ID does not exist" containerID="61fd09929a7bc63b2fce3b88f2503379d054444bf6cb18353a5a1f60fee8a4b2" Nov 30 08:42:32 crc kubenswrapper[4941]: I1130 08:42:32.056998 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61fd09929a7bc63b2fce3b88f2503379d054444bf6cb18353a5a1f60fee8a4b2"} err="failed to get container status \"61fd09929a7bc63b2fce3b88f2503379d054444bf6cb18353a5a1f60fee8a4b2\": rpc error: code = NotFound desc = could not find container \"61fd09929a7bc63b2fce3b88f2503379d054444bf6cb18353a5a1f60fee8a4b2\": container with ID starting with 61fd09929a7bc63b2fce3b88f2503379d054444bf6cb18353a5a1f60fee8a4b2 not found: ID does not exist" Nov 30 08:42:33 crc kubenswrapper[4941]: I1130 08:42:33.547044 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fca500c2-a950-469c-9cfd-9c5ad51b0e52" path="/var/lib/kubelet/pods/fca500c2-a950-469c-9cfd-9c5ad51b0e52/volumes" Nov 30 08:42:40 crc kubenswrapper[4941]: I1130 08:42:40.032118 4941 generic.go:334] "Generic (PLEG): container finished" podID="a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" containerID="77dd2b3be86793b628581645a0b8360b4b9d95d710819bde1e6795f42120adda" exitCode=0 Nov 30 08:42:40 crc kubenswrapper[4941]: I1130 08:42:40.032251 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" event={"ID":"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3","Type":"ContainerDied","Data":"77dd2b3be86793b628581645a0b8360b4b9d95d710819bde1e6795f42120adda"} Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.534483 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.716804 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ceph\") pod \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.716956 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-nova-combined-ca-bundle\") pod \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.717003 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ssh-key\") pod \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.717071 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-libvirt-combined-ca-bundle\") pod \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.717507 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-bootstrap-combined-ca-bundle\") pod \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.717563 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-sriov-combined-ca-bundle\") pod \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.717612 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ovn-combined-ca-bundle\") pod \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.717658 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jl568\" (UniqueName: \"kubernetes.io/projected/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-kube-api-access-jl568\") pod \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.717703 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-telemetry-combined-ca-bundle\") pod \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.717757 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-metadata-combined-ca-bundle\") pod \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.717781 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-dhcp-combined-ca-bundle\") pod \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.721682 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-inventory\") pod \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\" (UID: \"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3\") " Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.723986 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ceph" (OuterVolumeSpecName: "ceph") pod "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" (UID: "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.724981 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" (UID: "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.725807 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-kube-api-access-jl568" (OuterVolumeSpecName: "kube-api-access-jl568") pod "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" (UID: "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3"). InnerVolumeSpecName "kube-api-access-jl568". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.726318 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" (UID: "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.727091 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" (UID: "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.727870 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" (UID: "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.727891 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" (UID: "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.728819 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" (UID: "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.730100 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" (UID: "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.732098 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" (UID: "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.763505 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" (UID: "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.791221 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-inventory" (OuterVolumeSpecName: "inventory") pod "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" (UID: "a0a57fdf-dbf3-4dec-8079-ecfffe844fa3"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.826252 4941 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.826291 4941 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.826305 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jl568\" (UniqueName: \"kubernetes.io/projected/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-kube-api-access-jl568\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.826319 4941 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.826345 4941 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.826357 4941 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.826368 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.826380 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.826391 4941 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.826403 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.826412 4941 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:41 crc kubenswrapper[4941]: I1130 08:42:41.826422 4941 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0a57fdf-dbf3-4dec-8079-ecfffe844fa3-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.057594 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" event={"ID":"a0a57fdf-dbf3-4dec-8079-ecfffe844fa3","Type":"ContainerDied","Data":"2f151c6cfa1cffc78cbf47f954587ebe950d9d7cfd680651e49673e8cc772a31"} Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.057640 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f151c6cfa1cffc78cbf47f954587ebe950d9d7cfd680651e49673e8cc772a31" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.057716 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-ktf6t" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.190811 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-kjp4b"] Nov 30 08:42:42 crc kubenswrapper[4941]: E1130 08:42:42.192120 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fca500c2-a950-469c-9cfd-9c5ad51b0e52" containerName="registry-server" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.192152 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fca500c2-a950-469c-9cfd-9c5ad51b0e52" containerName="registry-server" Nov 30 08:42:42 crc kubenswrapper[4941]: E1130 08:42:42.192175 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" containerName="install-certs-openstack-openstack-cell1" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.192184 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" containerName="install-certs-openstack-openstack-cell1" Nov 30 08:42:42 crc kubenswrapper[4941]: E1130 08:42:42.192200 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fca500c2-a950-469c-9cfd-9c5ad51b0e52" containerName="extract-utilities" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.192207 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fca500c2-a950-469c-9cfd-9c5ad51b0e52" containerName="extract-utilities" Nov 30 08:42:42 crc kubenswrapper[4941]: E1130 08:42:42.192243 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fca500c2-a950-469c-9cfd-9c5ad51b0e52" containerName="extract-content" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.192251 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fca500c2-a950-469c-9cfd-9c5ad51b0e52" containerName="extract-content" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.192584 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0a57fdf-dbf3-4dec-8079-ecfffe844fa3" containerName="install-certs-openstack-openstack-cell1" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.192624 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="fca500c2-a950-469c-9cfd-9c5ad51b0e52" containerName="registry-server" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.193924 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.202663 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-kjp4b"] Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.235145 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.235464 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.235481 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.235594 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.239049 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-kjp4b\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.239210 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-inventory\") pod \"ceph-client-openstack-openstack-cell1-kjp4b\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.239366 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-ceph\") pod \"ceph-client-openstack-openstack-cell1-kjp4b\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.239569 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb5fb\" (UniqueName: \"kubernetes.io/projected/2bb75047-8a2a-4187-a104-95b1a83c4b02-kube-api-access-rb5fb\") pod \"ceph-client-openstack-openstack-cell1-kjp4b\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.341697 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-kjp4b\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.341786 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-inventory\") pod \"ceph-client-openstack-openstack-cell1-kjp4b\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.341833 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-ceph\") pod \"ceph-client-openstack-openstack-cell1-kjp4b\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.341875 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb5fb\" (UniqueName: \"kubernetes.io/projected/2bb75047-8a2a-4187-a104-95b1a83c4b02-kube-api-access-rb5fb\") pod \"ceph-client-openstack-openstack-cell1-kjp4b\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.345945 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-inventory\") pod \"ceph-client-openstack-openstack-cell1-kjp4b\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.346285 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-ceph\") pod \"ceph-client-openstack-openstack-cell1-kjp4b\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.347821 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-kjp4b\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.366768 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb5fb\" (UniqueName: \"kubernetes.io/projected/2bb75047-8a2a-4187-a104-95b1a83c4b02-kube-api-access-rb5fb\") pod \"ceph-client-openstack-openstack-cell1-kjp4b\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:42 crc kubenswrapper[4941]: I1130 08:42:42.556187 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:43 crc kubenswrapper[4941]: I1130 08:42:43.138280 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-kjp4b"] Nov 30 08:42:44 crc kubenswrapper[4941]: I1130 08:42:44.086780 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" event={"ID":"2bb75047-8a2a-4187-a104-95b1a83c4b02","Type":"ContainerStarted","Data":"4ca709cec84bfebe9a953b8350b41ebd0006f346f08f550102b94f6c84fad840"} Nov 30 08:42:44 crc kubenswrapper[4941]: I1130 08:42:44.087352 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" event={"ID":"2bb75047-8a2a-4187-a104-95b1a83c4b02","Type":"ContainerStarted","Data":"7138eaea3ba5ca20c79b987943c48413b96aeaa5ed047a2e869a5514ee1bd846"} Nov 30 08:42:44 crc kubenswrapper[4941]: I1130 08:42:44.123467 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" podStartSLOduration=1.620222404 podStartE2EDuration="2.123433659s" podCreationTimestamp="2025-11-30 08:42:42 +0000 UTC" firstStartedPulling="2025-11-30 08:42:43.141177626 +0000 UTC m=+6983.909349275" lastFinishedPulling="2025-11-30 08:42:43.644388881 +0000 UTC m=+6984.412560530" observedRunningTime="2025-11-30 08:42:44.105360681 +0000 UTC m=+6984.873532340" watchObservedRunningTime="2025-11-30 08:42:44.123433659 +0000 UTC m=+6984.891605308" Nov 30 08:42:44 crc kubenswrapper[4941]: I1130 08:42:44.522057 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:42:44 crc kubenswrapper[4941]: E1130 08:42:44.523186 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:42:49 crc kubenswrapper[4941]: I1130 08:42:49.155814 4941 generic.go:334] "Generic (PLEG): container finished" podID="2bb75047-8a2a-4187-a104-95b1a83c4b02" containerID="4ca709cec84bfebe9a953b8350b41ebd0006f346f08f550102b94f6c84fad840" exitCode=0 Nov 30 08:42:49 crc kubenswrapper[4941]: I1130 08:42:49.155911 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" event={"ID":"2bb75047-8a2a-4187-a104-95b1a83c4b02","Type":"ContainerDied","Data":"4ca709cec84bfebe9a953b8350b41ebd0006f346f08f550102b94f6c84fad840"} Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.667033 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.769240 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-inventory\") pod \"2bb75047-8a2a-4187-a104-95b1a83c4b02\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.769397 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb5fb\" (UniqueName: \"kubernetes.io/projected/2bb75047-8a2a-4187-a104-95b1a83c4b02-kube-api-access-rb5fb\") pod \"2bb75047-8a2a-4187-a104-95b1a83c4b02\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.769465 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-ssh-key\") pod \"2bb75047-8a2a-4187-a104-95b1a83c4b02\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.769629 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-ceph\") pod \"2bb75047-8a2a-4187-a104-95b1a83c4b02\" (UID: \"2bb75047-8a2a-4187-a104-95b1a83c4b02\") " Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.776907 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-ceph" (OuterVolumeSpecName: "ceph") pod "2bb75047-8a2a-4187-a104-95b1a83c4b02" (UID: "2bb75047-8a2a-4187-a104-95b1a83c4b02"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.786504 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bb75047-8a2a-4187-a104-95b1a83c4b02-kube-api-access-rb5fb" (OuterVolumeSpecName: "kube-api-access-rb5fb") pod "2bb75047-8a2a-4187-a104-95b1a83c4b02" (UID: "2bb75047-8a2a-4187-a104-95b1a83c4b02"). InnerVolumeSpecName "kube-api-access-rb5fb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.812901 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2bb75047-8a2a-4187-a104-95b1a83c4b02" (UID: "2bb75047-8a2a-4187-a104-95b1a83c4b02"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.815858 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-inventory" (OuterVolumeSpecName: "inventory") pod "2bb75047-8a2a-4187-a104-95b1a83c4b02" (UID: "2bb75047-8a2a-4187-a104-95b1a83c4b02"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.873668 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb5fb\" (UniqueName: \"kubernetes.io/projected/2bb75047-8a2a-4187-a104-95b1a83c4b02-kube-api-access-rb5fb\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.873706 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.873715 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:50 crc kubenswrapper[4941]: I1130 08:42:50.873724 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2bb75047-8a2a-4187-a104-95b1a83c4b02-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.180042 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" event={"ID":"2bb75047-8a2a-4187-a104-95b1a83c4b02","Type":"ContainerDied","Data":"7138eaea3ba5ca20c79b987943c48413b96aeaa5ed047a2e869a5514ee1bd846"} Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.180102 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7138eaea3ba5ca20c79b987943c48413b96aeaa5ed047a2e869a5514ee1bd846" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.180805 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-kjp4b" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.279761 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-hj9k4"] Nov 30 08:42:51 crc kubenswrapper[4941]: E1130 08:42:51.280543 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bb75047-8a2a-4187-a104-95b1a83c4b02" containerName="ceph-client-openstack-openstack-cell1" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.280569 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bb75047-8a2a-4187-a104-95b1a83c4b02" containerName="ceph-client-openstack-openstack-cell1" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.280901 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bb75047-8a2a-4187-a104-95b1a83c4b02" containerName="ceph-client-openstack-openstack-cell1" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.282067 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.284011 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.284154 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.284251 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ceph\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.284288 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ssh-key\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.284351 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-inventory\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.284505 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdtbc\" (UniqueName: \"kubernetes.io/projected/3bf253a3-5fac-4f21-bfdf-78179dd8c647-kube-api-access-vdtbc\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.285717 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.285952 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.286079 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.295587 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.299238 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.317800 4941 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-hj9k4"] Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.386620 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ceph\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.386679 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ssh-key\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.386764 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-inventory\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.386968 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdtbc\" (UniqueName: \"kubernetes.io/projected/3bf253a3-5fac-4f21-bfdf-78179dd8c647-kube-api-access-vdtbc\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.387030 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.387059 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.389820 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.392528 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ssh-key\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.392833 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-inventory\") pod 
\"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.393810 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.395016 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ceph\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.408974 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdtbc\" (UniqueName: \"kubernetes.io/projected/3bf253a3-5fac-4f21-bfdf-78179dd8c647-kube-api-access-vdtbc\") pod \"ovn-openstack-openstack-cell1-hj9k4\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:51 crc kubenswrapper[4941]: I1130 08:42:51.614825 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:42:52 crc kubenswrapper[4941]: I1130 08:42:52.266481 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-hj9k4"] Nov 30 08:42:53 crc kubenswrapper[4941]: I1130 08:42:53.223358 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-hj9k4" event={"ID":"3bf253a3-5fac-4f21-bfdf-78179dd8c647","Type":"ContainerStarted","Data":"094436c7e714f15dfb3fe7ea29ac078646ff99a45e519c66d5a518e7e5cc99c2"} Nov 30 08:42:53 crc kubenswrapper[4941]: I1130 08:42:53.223421 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-hj9k4" event={"ID":"3bf253a3-5fac-4f21-bfdf-78179dd8c647","Type":"ContainerStarted","Data":"25bc919478d503622ace4678b3b01c57fee7349666178a31fdc84b9251194b18"} Nov 30 08:42:53 crc kubenswrapper[4941]: I1130 08:42:53.263937 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-hj9k4" podStartSLOduration=1.665060656 podStartE2EDuration="2.263911457s" podCreationTimestamp="2025-11-30 08:42:51 +0000 UTC" firstStartedPulling="2025-11-30 08:42:52.266460415 +0000 UTC m=+6993.034632064" lastFinishedPulling="2025-11-30 08:42:52.865311266 +0000 UTC m=+6993.633482865" observedRunningTime="2025-11-30 08:42:53.245975543 +0000 UTC m=+6994.014147162" watchObservedRunningTime="2025-11-30 08:42:53.263911457 +0000 UTC m=+6994.032083076" Nov 30 08:42:58 crc kubenswrapper[4941]: I1130 08:42:58.521280 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:42:58 crc kubenswrapper[4941]: E1130 08:42:58.522048 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:43:11 crc kubenswrapper[4941]: I1130 08:43:11.522659 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:43:11 crc kubenswrapper[4941]: E1130 08:43:11.523762 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:43:25 crc kubenswrapper[4941]: I1130 08:43:25.521726 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:43:25 crc kubenswrapper[4941]: E1130 08:43:25.522535 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:43:40 crc kubenswrapper[4941]: I1130 08:43:40.522464 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:43:40 crc kubenswrapper[4941]: I1130 08:43:40.923904 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"83c532f6891919fbe101ca53964d4545bf26f192c23b11204d7a74e87bf589c4"} Nov 30 08:44:02 crc kubenswrapper[4941]: I1130 08:44:02.197208 4941 generic.go:334] "Generic (PLEG): container finished" podID="3bf253a3-5fac-4f21-bfdf-78179dd8c647" containerID="094436c7e714f15dfb3fe7ea29ac078646ff99a45e519c66d5a518e7e5cc99c2" exitCode=0 Nov 30 08:44:02 crc kubenswrapper[4941]: I1130 08:44:02.197307 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-hj9k4" event={"ID":"3bf253a3-5fac-4f21-bfdf-78179dd8c647","Type":"ContainerDied","Data":"094436c7e714f15dfb3fe7ea29ac078646ff99a45e519c66d5a518e7e5cc99c2"} Nov 30 08:44:03 crc kubenswrapper[4941]: I1130 08:44:03.848468 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:44:03 crc kubenswrapper[4941]: I1130 08:44:03.988840 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ovncontroller-config-0\") pod \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " Nov 30 08:44:03 crc kubenswrapper[4941]: I1130 08:44:03.989025 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ovn-combined-ca-bundle\") pod \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " Nov 30 08:44:03 crc kubenswrapper[4941]: I1130 08:44:03.989996 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdtbc\" (UniqueName: \"kubernetes.io/projected/3bf253a3-5fac-4f21-bfdf-78179dd8c647-kube-api-access-vdtbc\") pod \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " Nov 30 08:44:03 crc kubenswrapper[4941]: I1130 08:44:03.990105 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ssh-key\") pod \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " Nov 30 08:44:03 crc kubenswrapper[4941]: I1130 08:44:03.990352 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ceph\") pod \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " Nov 30 08:44:03 crc kubenswrapper[4941]: I1130 08:44:03.990508 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-inventory\") pod \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\" (UID: \"3bf253a3-5fac-4f21-bfdf-78179dd8c647\") " Nov 30 08:44:03 crc kubenswrapper[4941]: I1130 08:44:03.996408 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "3bf253a3-5fac-4f21-bfdf-78179dd8c647" (UID: "3bf253a3-5fac-4f21-bfdf-78179dd8c647"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.006716 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bf253a3-5fac-4f21-bfdf-78179dd8c647-kube-api-access-vdtbc" (OuterVolumeSpecName: "kube-api-access-vdtbc") pod "3bf253a3-5fac-4f21-bfdf-78179dd8c647" (UID: "3bf253a3-5fac-4f21-bfdf-78179dd8c647"). InnerVolumeSpecName "kube-api-access-vdtbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.008933 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ceph" (OuterVolumeSpecName: "ceph") pod "3bf253a3-5fac-4f21-bfdf-78179dd8c647" (UID: "3bf253a3-5fac-4f21-bfdf-78179dd8c647"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.023095 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3bf253a3-5fac-4f21-bfdf-78179dd8c647" (UID: "3bf253a3-5fac-4f21-bfdf-78179dd8c647"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.025103 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-inventory" (OuterVolumeSpecName: "inventory") pod "3bf253a3-5fac-4f21-bfdf-78179dd8c647" (UID: "3bf253a3-5fac-4f21-bfdf-78179dd8c647"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.044889 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "3bf253a3-5fac-4f21-bfdf-78179dd8c647" (UID: "3bf253a3-5fac-4f21-bfdf-78179dd8c647"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.094210 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.094263 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.094281 4941 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.094295 4941 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.094309 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdtbc\" (UniqueName: \"kubernetes.io/projected/3bf253a3-5fac-4f21-bfdf-78179dd8c647-kube-api-access-vdtbc\") on node \"crc\" DevicePath \"\"" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.094338 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3bf253a3-5fac-4f21-bfdf-78179dd8c647-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.227881 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-hj9k4" event={"ID":"3bf253a3-5fac-4f21-bfdf-78179dd8c647","Type":"ContainerDied","Data":"25bc919478d503622ace4678b3b01c57fee7349666178a31fdc84b9251194b18"} Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.227954 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25bc919478d503622ace4678b3b01c57fee7349666178a31fdc84b9251194b18" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.228005 
4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-hj9k4" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.353769 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-467cs"] Nov 30 08:44:04 crc kubenswrapper[4941]: E1130 08:44:04.354357 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bf253a3-5fac-4f21-bfdf-78179dd8c647" containerName="ovn-openstack-openstack-cell1" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.354377 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bf253a3-5fac-4f21-bfdf-78179dd8c647" containerName="ovn-openstack-openstack-cell1" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.354605 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bf253a3-5fac-4f21-bfdf-78179dd8c647" containerName="ovn-openstack-openstack-cell1" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.355608 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.360521 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.360937 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.361339 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.361472 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.362527 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.364454 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.377961 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-467cs"] Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.513885 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.514044 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbszm\" (UniqueName: \"kubernetes.io/projected/984773df-2a8c-4db3-a97e-993d3e6985fc-kube-api-access-qbszm\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.514160 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.514204 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.514265 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.514706 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.514802 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.617947 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.618168 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.618246 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbszm\" (UniqueName: \"kubernetes.io/projected/984773df-2a8c-4db3-a97e-993d3e6985fc-kube-api-access-qbszm\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.618375 4941 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.618420 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.618497 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.618664 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.629343 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.629544 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.629721 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.631222 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.631349 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.631570 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.642104 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbszm\" (UniqueName: \"kubernetes.io/projected/984773df-2a8c-4db3-a97e-993d3e6985fc-kube-api-access-qbszm\") pod \"neutron-metadata-openstack-openstack-cell1-467cs\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:04 crc kubenswrapper[4941]: I1130 08:44:04.681445 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:44:05 crc kubenswrapper[4941]: I1130 08:44:05.277125 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-467cs"] Nov 30 08:44:05 crc kubenswrapper[4941]: I1130 08:44:05.343710 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 08:44:06 crc kubenswrapper[4941]: I1130 08:44:06.269092 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" event={"ID":"984773df-2a8c-4db3-a97e-993d3e6985fc","Type":"ContainerStarted","Data":"f690611787f0a4ccc1bddc620f519a39256cd05e58bfc76e8d4279ca02b03fd9"} Nov 30 08:44:07 crc kubenswrapper[4941]: I1130 08:44:07.283733 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" event={"ID":"984773df-2a8c-4db3-a97e-993d3e6985fc","Type":"ContainerStarted","Data":"51cc47dd836218e7501acce3b03297323d109ed849dd85b5eca64f5f3e965bcb"} Nov 30 08:44:07 crc kubenswrapper[4941]: I1130 08:44:07.318118 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" podStartSLOduration=2.556323582 podStartE2EDuration="3.318095708s" podCreationTimestamp="2025-11-30 08:44:04 +0000 UTC" firstStartedPulling="2025-11-30 08:44:05.343276016 +0000 UTC m=+7066.111447635" lastFinishedPulling="2025-11-30 08:44:06.105048142 +0000 UTC m=+7066.873219761" observedRunningTime="2025-11-30 08:44:07.311262737 +0000 UTC m=+7068.079434376" watchObservedRunningTime="2025-11-30 08:44:07.318095708 +0000 UTC m=+7068.086267307" Nov 30 08:44:07 crc kubenswrapper[4941]: I1130 08:44:07.748615 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ts97s"] Nov 30 08:44:07 crc kubenswrapper[4941]: I1130 08:44:07.754649 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:07 crc kubenswrapper[4941]: I1130 08:44:07.783207 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ts97s"] Nov 30 08:44:07 crc kubenswrapper[4941]: I1130 08:44:07.912736 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d506168d-ad41-461f-9ac2-007292bca951-catalog-content\") pod \"certified-operators-ts97s\" (UID: \"d506168d-ad41-461f-9ac2-007292bca951\") " pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:07 crc kubenswrapper[4941]: I1130 08:44:07.912812 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdkwp\" (UniqueName: \"kubernetes.io/projected/d506168d-ad41-461f-9ac2-007292bca951-kube-api-access-bdkwp\") pod \"certified-operators-ts97s\" (UID: \"d506168d-ad41-461f-9ac2-007292bca951\") " pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:07 crc kubenswrapper[4941]: I1130 08:44:07.913199 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d506168d-ad41-461f-9ac2-007292bca951-utilities\") pod \"certified-operators-ts97s\" (UID: \"d506168d-ad41-461f-9ac2-007292bca951\") " pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:08 crc kubenswrapper[4941]: I1130 08:44:08.016317 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d506168d-ad41-461f-9ac2-007292bca951-utilities\") pod \"certified-operators-ts97s\" (UID: \"d506168d-ad41-461f-9ac2-007292bca951\") " pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:08 crc kubenswrapper[4941]: I1130 08:44:08.016967 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d506168d-ad41-461f-9ac2-007292bca951-catalog-content\") pod \"certified-operators-ts97s\" (UID: \"d506168d-ad41-461f-9ac2-007292bca951\") " pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:08 crc kubenswrapper[4941]: I1130 08:44:08.016974 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d506168d-ad41-461f-9ac2-007292bca951-utilities\") pod \"certified-operators-ts97s\" (UID: \"d506168d-ad41-461f-9ac2-007292bca951\") " pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:08 crc kubenswrapper[4941]: I1130 08:44:08.017010 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdkwp\" (UniqueName: \"kubernetes.io/projected/d506168d-ad41-461f-9ac2-007292bca951-kube-api-access-bdkwp\") pod \"certified-operators-ts97s\" (UID: \"d506168d-ad41-461f-9ac2-007292bca951\") " pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:08 crc kubenswrapper[4941]: I1130 08:44:08.017233 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d506168d-ad41-461f-9ac2-007292bca951-catalog-content\") pod \"certified-operators-ts97s\" (UID: \"d506168d-ad41-461f-9ac2-007292bca951\") " pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:08 crc kubenswrapper[4941]: I1130 08:44:08.038948 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bdkwp\" (UniqueName: \"kubernetes.io/projected/d506168d-ad41-461f-9ac2-007292bca951-kube-api-access-bdkwp\") pod \"certified-operators-ts97s\" (UID: \"d506168d-ad41-461f-9ac2-007292bca951\") " pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:08 crc kubenswrapper[4941]: I1130 08:44:08.096991 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:08 crc kubenswrapper[4941]: I1130 08:44:08.627546 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ts97s"] Nov 30 08:44:08 crc kubenswrapper[4941]: W1130 08:44:08.636794 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd506168d_ad41_461f_9ac2_007292bca951.slice/crio-fc7f165048d84eb72e53a274ced4f0f0cdff447d1ae66b18925395fa113aff76 WatchSource:0}: Error finding container fc7f165048d84eb72e53a274ced4f0f0cdff447d1ae66b18925395fa113aff76: Status 404 returned error can't find the container with id fc7f165048d84eb72e53a274ced4f0f0cdff447d1ae66b18925395fa113aff76 Nov 30 08:44:09 crc kubenswrapper[4941]: I1130 08:44:09.331715 4941 generic.go:334] "Generic (PLEG): container finished" podID="d506168d-ad41-461f-9ac2-007292bca951" containerID="31285635479e3f26f45fe248485bdcad859949cead2274da26214fc106276771" exitCode=0 Nov 30 08:44:09 crc kubenswrapper[4941]: I1130 08:44:09.331801 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ts97s" event={"ID":"d506168d-ad41-461f-9ac2-007292bca951","Type":"ContainerDied","Data":"31285635479e3f26f45fe248485bdcad859949cead2274da26214fc106276771"} Nov 30 08:44:09 crc kubenswrapper[4941]: I1130 08:44:09.332139 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ts97s" event={"ID":"d506168d-ad41-461f-9ac2-007292bca951","Type":"ContainerStarted","Data":"fc7f165048d84eb72e53a274ced4f0f0cdff447d1ae66b18925395fa113aff76"} Nov 30 08:44:10 crc kubenswrapper[4941]: I1130 08:44:10.346130 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ts97s" event={"ID":"d506168d-ad41-461f-9ac2-007292bca951","Type":"ContainerStarted","Data":"d64821a958f462b8b716582ecfb9584cee9945288df679a244f002a0b9dc49fd"} Nov 30 08:44:12 crc kubenswrapper[4941]: I1130 08:44:12.376302 4941 generic.go:334] "Generic (PLEG): container finished" podID="d506168d-ad41-461f-9ac2-007292bca951" containerID="d64821a958f462b8b716582ecfb9584cee9945288df679a244f002a0b9dc49fd" exitCode=0 Nov 30 08:44:12 crc kubenswrapper[4941]: I1130 08:44:12.376461 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ts97s" event={"ID":"d506168d-ad41-461f-9ac2-007292bca951","Type":"ContainerDied","Data":"d64821a958f462b8b716582ecfb9584cee9945288df679a244f002a0b9dc49fd"} Nov 30 08:44:13 crc kubenswrapper[4941]: I1130 08:44:13.390878 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ts97s" event={"ID":"d506168d-ad41-461f-9ac2-007292bca951","Type":"ContainerStarted","Data":"c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37"} Nov 30 08:44:13 crc kubenswrapper[4941]: I1130 08:44:13.420244 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ts97s" 
podStartSLOduration=2.9153025489999997 podStartE2EDuration="6.420216157s" podCreationTimestamp="2025-11-30 08:44:07 +0000 UTC" firstStartedPulling="2025-11-30 08:44:09.334254829 +0000 UTC m=+7070.102426438" lastFinishedPulling="2025-11-30 08:44:12.839168437 +0000 UTC m=+7073.607340046" observedRunningTime="2025-11-30 08:44:13.412693305 +0000 UTC m=+7074.180864914" watchObservedRunningTime="2025-11-30 08:44:13.420216157 +0000 UTC m=+7074.188387766" Nov 30 08:44:18 crc kubenswrapper[4941]: I1130 08:44:18.097628 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:18 crc kubenswrapper[4941]: I1130 08:44:18.098499 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:18 crc kubenswrapper[4941]: I1130 08:44:18.171804 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:18 crc kubenswrapper[4941]: I1130 08:44:18.498146 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:21 crc kubenswrapper[4941]: I1130 08:44:21.724315 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ts97s"] Nov 30 08:44:21 crc kubenswrapper[4941]: I1130 08:44:21.724999 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ts97s" podUID="d506168d-ad41-461f-9ac2-007292bca951" containerName="registry-server" containerID="cri-o://c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37" gracePeriod=2 Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.266173 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.337841 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdkwp\" (UniqueName: \"kubernetes.io/projected/d506168d-ad41-461f-9ac2-007292bca951-kube-api-access-bdkwp\") pod \"d506168d-ad41-461f-9ac2-007292bca951\" (UID: \"d506168d-ad41-461f-9ac2-007292bca951\") " Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.338491 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d506168d-ad41-461f-9ac2-007292bca951-utilities\") pod \"d506168d-ad41-461f-9ac2-007292bca951\" (UID: \"d506168d-ad41-461f-9ac2-007292bca951\") " Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.338698 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d506168d-ad41-461f-9ac2-007292bca951-catalog-content\") pod \"d506168d-ad41-461f-9ac2-007292bca951\" (UID: \"d506168d-ad41-461f-9ac2-007292bca951\") " Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.339550 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d506168d-ad41-461f-9ac2-007292bca951-utilities" (OuterVolumeSpecName: "utilities") pod "d506168d-ad41-461f-9ac2-007292bca951" (UID: "d506168d-ad41-461f-9ac2-007292bca951"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.345880 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d506168d-ad41-461f-9ac2-007292bca951-kube-api-access-bdkwp" (OuterVolumeSpecName: "kube-api-access-bdkwp") pod "d506168d-ad41-461f-9ac2-007292bca951" (UID: "d506168d-ad41-461f-9ac2-007292bca951"). InnerVolumeSpecName "kube-api-access-bdkwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.388678 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d506168d-ad41-461f-9ac2-007292bca951-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d506168d-ad41-461f-9ac2-007292bca951" (UID: "d506168d-ad41-461f-9ac2-007292bca951"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.442187 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d506168d-ad41-461f-9ac2-007292bca951-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.442648 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d506168d-ad41-461f-9ac2-007292bca951-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.442723 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdkwp\" (UniqueName: \"kubernetes.io/projected/d506168d-ad41-461f-9ac2-007292bca951-kube-api-access-bdkwp\") on node \"crc\" DevicePath \"\"" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.507932 4941 generic.go:334] "Generic (PLEG): container finished" podID="d506168d-ad41-461f-9ac2-007292bca951" containerID="c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37" exitCode=0 Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.508012 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ts97s" event={"ID":"d506168d-ad41-461f-9ac2-007292bca951","Type":"ContainerDied","Data":"c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37"} Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.508074 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ts97s" event={"ID":"d506168d-ad41-461f-9ac2-007292bca951","Type":"ContainerDied","Data":"fc7f165048d84eb72e53a274ced4f0f0cdff447d1ae66b18925395fa113aff76"} Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.508097 4941 scope.go:117] "RemoveContainer" containerID="c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.508020 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ts97s" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.531345 4941 scope.go:117] "RemoveContainer" containerID="d64821a958f462b8b716582ecfb9584cee9945288df679a244f002a0b9dc49fd" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.546741 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ts97s"] Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.554817 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ts97s"] Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.573434 4941 scope.go:117] "RemoveContainer" containerID="31285635479e3f26f45fe248485bdcad859949cead2274da26214fc106276771" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.628776 4941 scope.go:117] "RemoveContainer" containerID="c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37" Nov 30 08:44:22 crc kubenswrapper[4941]: E1130 08:44:22.629422 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37\": container with ID starting with c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37 not found: ID does not exist" containerID="c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.629504 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37"} err="failed to get container status \"c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37\": rpc error: code = NotFound desc = could not find container \"c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37\": container with ID starting with c8f10db15f8e4b60aca2570bd447ae50bb7a0ae2051c38c52e909543dceadd37 not found: ID does not exist" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.629557 4941 scope.go:117] "RemoveContainer" containerID="d64821a958f462b8b716582ecfb9584cee9945288df679a244f002a0b9dc49fd" Nov 30 08:44:22 crc kubenswrapper[4941]: E1130 08:44:22.629997 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d64821a958f462b8b716582ecfb9584cee9945288df679a244f002a0b9dc49fd\": container with ID starting with d64821a958f462b8b716582ecfb9584cee9945288df679a244f002a0b9dc49fd not found: ID does not exist" containerID="d64821a958f462b8b716582ecfb9584cee9945288df679a244f002a0b9dc49fd" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.630032 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d64821a958f462b8b716582ecfb9584cee9945288df679a244f002a0b9dc49fd"} err="failed to get container status \"d64821a958f462b8b716582ecfb9584cee9945288df679a244f002a0b9dc49fd\": rpc error: code = NotFound desc = could not find container \"d64821a958f462b8b716582ecfb9584cee9945288df679a244f002a0b9dc49fd\": container with ID starting with d64821a958f462b8b716582ecfb9584cee9945288df679a244f002a0b9dc49fd not found: ID does not exist" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.630054 4941 scope.go:117] "RemoveContainer" containerID="31285635479e3f26f45fe248485bdcad859949cead2274da26214fc106276771" Nov 30 08:44:22 crc kubenswrapper[4941]: E1130 08:44:22.630667 4941 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"31285635479e3f26f45fe248485bdcad859949cead2274da26214fc106276771\": container with ID starting with 31285635479e3f26f45fe248485bdcad859949cead2274da26214fc106276771 not found: ID does not exist" containerID="31285635479e3f26f45fe248485bdcad859949cead2274da26214fc106276771" Nov 30 08:44:22 crc kubenswrapper[4941]: I1130 08:44:22.630706 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31285635479e3f26f45fe248485bdcad859949cead2274da26214fc106276771"} err="failed to get container status \"31285635479e3f26f45fe248485bdcad859949cead2274da26214fc106276771\": rpc error: code = NotFound desc = could not find container \"31285635479e3f26f45fe248485bdcad859949cead2274da26214fc106276771\": container with ID starting with 31285635479e3f26f45fe248485bdcad859949cead2274da26214fc106276771 not found: ID does not exist" Nov 30 08:44:23 crc kubenswrapper[4941]: I1130 08:44:23.562276 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d506168d-ad41-461f-9ac2-007292bca951" path="/var/lib/kubelet/pods/d506168d-ad41-461f-9ac2-007292bca951/volumes" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.158242 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5"] Nov 30 08:45:00 crc kubenswrapper[4941]: E1130 08:45:00.161343 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d506168d-ad41-461f-9ac2-007292bca951" containerName="registry-server" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.161363 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d506168d-ad41-461f-9ac2-007292bca951" containerName="registry-server" Nov 30 08:45:00 crc kubenswrapper[4941]: E1130 08:45:00.161406 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d506168d-ad41-461f-9ac2-007292bca951" containerName="extract-content" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.161413 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d506168d-ad41-461f-9ac2-007292bca951" containerName="extract-content" Nov 30 08:45:00 crc kubenswrapper[4941]: E1130 08:45:00.161430 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d506168d-ad41-461f-9ac2-007292bca951" containerName="extract-utilities" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.161438 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d506168d-ad41-461f-9ac2-007292bca951" containerName="extract-utilities" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.161732 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d506168d-ad41-461f-9ac2-007292bca951" containerName="registry-server" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.162688 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.174108 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5"] Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.207416 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.207631 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.256901 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/152a5a57-c99f-42f3-8050-101bcfc1f2ae-secret-volume\") pod \"collect-profiles-29408205-rbdm5\" (UID: \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.257470 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/152a5a57-c99f-42f3-8050-101bcfc1f2ae-config-volume\") pod \"collect-profiles-29408205-rbdm5\" (UID: \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.257629 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mml2\" (UniqueName: \"kubernetes.io/projected/152a5a57-c99f-42f3-8050-101bcfc1f2ae-kube-api-access-2mml2\") pod \"collect-profiles-29408205-rbdm5\" (UID: \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.359942 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mml2\" (UniqueName: \"kubernetes.io/projected/152a5a57-c99f-42f3-8050-101bcfc1f2ae-kube-api-access-2mml2\") pod \"collect-profiles-29408205-rbdm5\" (UID: \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.360074 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/152a5a57-c99f-42f3-8050-101bcfc1f2ae-secret-volume\") pod \"collect-profiles-29408205-rbdm5\" (UID: \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.360242 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/152a5a57-c99f-42f3-8050-101bcfc1f2ae-config-volume\") pod \"collect-profiles-29408205-rbdm5\" (UID: \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.361464 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/152a5a57-c99f-42f3-8050-101bcfc1f2ae-config-volume\") pod 
\"collect-profiles-29408205-rbdm5\" (UID: \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.376226 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/152a5a57-c99f-42f3-8050-101bcfc1f2ae-secret-volume\") pod \"collect-profiles-29408205-rbdm5\" (UID: \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.379383 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mml2\" (UniqueName: \"kubernetes.io/projected/152a5a57-c99f-42f3-8050-101bcfc1f2ae-kube-api-access-2mml2\") pod \"collect-profiles-29408205-rbdm5\" (UID: \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" Nov 30 08:45:00 crc kubenswrapper[4941]: I1130 08:45:00.540930 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" Nov 30 08:45:01 crc kubenswrapper[4941]: I1130 08:45:01.060701 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5"] Nov 30 08:45:02 crc kubenswrapper[4941]: I1130 08:45:02.050495 4941 generic.go:334] "Generic (PLEG): container finished" podID="152a5a57-c99f-42f3-8050-101bcfc1f2ae" containerID="293cfdc2f87f1cd001abeb0f1cdf1141a38b18bc69033b5ca8362178b93c3b55" exitCode=0 Nov 30 08:45:02 crc kubenswrapper[4941]: I1130 08:45:02.050604 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" event={"ID":"152a5a57-c99f-42f3-8050-101bcfc1f2ae","Type":"ContainerDied","Data":"293cfdc2f87f1cd001abeb0f1cdf1141a38b18bc69033b5ca8362178b93c3b55"} Nov 30 08:45:02 crc kubenswrapper[4941]: I1130 08:45:02.052901 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" event={"ID":"152a5a57-c99f-42f3-8050-101bcfc1f2ae","Type":"ContainerStarted","Data":"52b92d987d8c67d34b797b9d12a3c386fbe69ac5c914508a3afcd1068ae5c5ee"} Nov 30 08:45:03 crc kubenswrapper[4941]: I1130 08:45:03.066428 4941 generic.go:334] "Generic (PLEG): container finished" podID="984773df-2a8c-4db3-a97e-993d3e6985fc" containerID="51cc47dd836218e7501acce3b03297323d109ed849dd85b5eca64f5f3e965bcb" exitCode=0 Nov 30 08:45:03 crc kubenswrapper[4941]: I1130 08:45:03.066701 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" event={"ID":"984773df-2a8c-4db3-a97e-993d3e6985fc","Type":"ContainerDied","Data":"51cc47dd836218e7501acce3b03297323d109ed849dd85b5eca64f5f3e965bcb"} Nov 30 08:45:03 crc kubenswrapper[4941]: I1130 08:45:03.529926 4941 util.go:48] "No ready sandbox for pod can be found. 
Nov 30 08:45:03 crc kubenswrapper[4941]: I1130 08:45:03.660100 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mml2\" (UniqueName: \"kubernetes.io/projected/152a5a57-c99f-42f3-8050-101bcfc1f2ae-kube-api-access-2mml2\") pod \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\" (UID: \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\") "
Nov 30 08:45:03 crc kubenswrapper[4941]: I1130 08:45:03.660267 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/152a5a57-c99f-42f3-8050-101bcfc1f2ae-config-volume\") pod \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\" (UID: \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\") "
Nov 30 08:45:03 crc kubenswrapper[4941]: I1130 08:45:03.660595 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/152a5a57-c99f-42f3-8050-101bcfc1f2ae-secret-volume\") pod \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\" (UID: \"152a5a57-c99f-42f3-8050-101bcfc1f2ae\") "
Nov 30 08:45:03 crc kubenswrapper[4941]: I1130 08:45:03.661484 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/152a5a57-c99f-42f3-8050-101bcfc1f2ae-config-volume" (OuterVolumeSpecName: "config-volume") pod "152a5a57-c99f-42f3-8050-101bcfc1f2ae" (UID: "152a5a57-c99f-42f3-8050-101bcfc1f2ae"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:45:03 crc kubenswrapper[4941]: I1130 08:45:03.670318 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/152a5a57-c99f-42f3-8050-101bcfc1f2ae-kube-api-access-2mml2" (OuterVolumeSpecName: "kube-api-access-2mml2") pod "152a5a57-c99f-42f3-8050-101bcfc1f2ae" (UID: "152a5a57-c99f-42f3-8050-101bcfc1f2ae"). InnerVolumeSpecName "kube-api-access-2mml2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:45:03 crc kubenswrapper[4941]: I1130 08:45:03.671473 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/152a5a57-c99f-42f3-8050-101bcfc1f2ae-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "152a5a57-c99f-42f3-8050-101bcfc1f2ae" (UID: "152a5a57-c99f-42f3-8050-101bcfc1f2ae"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:45:03 crc kubenswrapper[4941]: I1130 08:45:03.763045 4941 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/152a5a57-c99f-42f3-8050-101bcfc1f2ae-secret-volume\") on node \"crc\" DevicePath \"\""
Nov 30 08:45:03 crc kubenswrapper[4941]: I1130 08:45:03.764537 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mml2\" (UniqueName: \"kubernetes.io/projected/152a5a57-c99f-42f3-8050-101bcfc1f2ae-kube-api-access-2mml2\") on node \"crc\" DevicePath \"\""
Nov 30 08:45:03 crc kubenswrapper[4941]: I1130 08:45:03.764932 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/152a5a57-c99f-42f3-8050-101bcfc1f2ae-config-volume\") on node \"crc\" DevicePath \"\""
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.104081 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5"
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.105304 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5" event={"ID":"152a5a57-c99f-42f3-8050-101bcfc1f2ae","Type":"ContainerDied","Data":"52b92d987d8c67d34b797b9d12a3c386fbe69ac5c914508a3afcd1068ae5c5ee"}
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.105397 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52b92d987d8c67d34b797b9d12a3c386fbe69ac5c914508a3afcd1068ae5c5ee"
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.539092 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs"
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.691145 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbszm\" (UniqueName: \"kubernetes.io/projected/984773df-2a8c-4db3-a97e-993d3e6985fc-kube-api-access-qbszm\") pod \"984773df-2a8c-4db3-a97e-993d3e6985fc\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") "
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.691237 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-nova-metadata-neutron-config-0\") pod \"984773df-2a8c-4db3-a97e-993d3e6985fc\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") "
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.691364 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-inventory\") pod \"984773df-2a8c-4db3-a97e-993d3e6985fc\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") "
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.691508 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-ceph\") pod \"984773df-2a8c-4db3-a97e-993d3e6985fc\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") "
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.691550 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-neutron-ovn-metadata-agent-neutron-config-0\") pod \"984773df-2a8c-4db3-a97e-993d3e6985fc\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") "
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.691584 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-neutron-metadata-combined-ca-bundle\") pod \"984773df-2a8c-4db3-a97e-993d3e6985fc\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") "
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.691630 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-ssh-key\") pod \"984773df-2a8c-4db3-a97e-993d3e6985fc\" (UID: \"984773df-2a8c-4db3-a97e-993d3e6985fc\") "
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.728138 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "984773df-2a8c-4db3-a97e-993d3e6985fc" (UID: "984773df-2a8c-4db3-a97e-993d3e6985fc"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.729670 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/984773df-2a8c-4db3-a97e-993d3e6985fc-kube-api-access-qbszm" (OuterVolumeSpecName: "kube-api-access-qbszm") pod "984773df-2a8c-4db3-a97e-993d3e6985fc" (UID: "984773df-2a8c-4db3-a97e-993d3e6985fc"). InnerVolumeSpecName "kube-api-access-qbszm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.749545 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-ceph" (OuterVolumeSpecName: "ceph") pod "984773df-2a8c-4db3-a97e-993d3e6985fc" (UID: "984773df-2a8c-4db3-a97e-993d3e6985fc"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.779617 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "984773df-2a8c-4db3-a97e-993d3e6985fc" (UID: "984773df-2a8c-4db3-a97e-993d3e6985fc"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.806661 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-inventory" (OuterVolumeSpecName: "inventory") pod "984773df-2a8c-4db3-a97e-993d3e6985fc" (UID: "984773df-2a8c-4db3-a97e-993d3e6985fc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.823196 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg"] Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.842220 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbszm\" (UniqueName: \"kubernetes.io/projected/984773df-2a8c-4db3-a97e-993d3e6985fc-kube-api-access-qbszm\") on node \"crc\" DevicePath \"\"" Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.842267 4941 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.842280 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.842292 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.842304 4941 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.854701 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "984773df-2a8c-4db3-a97e-993d3e6985fc" (UID: "984773df-2a8c-4db3-a97e-993d3e6985fc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.871683 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408160-tcnkg"] Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.889939 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "984773df-2a8c-4db3-a97e-993d3e6985fc" (UID: "984773df-2a8c-4db3-a97e-993d3e6985fc"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.944772 4941 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 30 08:45:04 crc kubenswrapper[4941]: I1130 08:45:04.945293 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/984773df-2a8c-4db3-a97e-993d3e6985fc-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.118547 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" event={"ID":"984773df-2a8c-4db3-a97e-993d3e6985fc","Type":"ContainerDied","Data":"f690611787f0a4ccc1bddc620f519a39256cd05e58bfc76e8d4279ca02b03fd9"} Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.118620 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f690611787f0a4ccc1bddc620f519a39256cd05e58bfc76e8d4279ca02b03fd9" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.118633 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-467cs" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.283555 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-fmfhx"] Nov 30 08:45:05 crc kubenswrapper[4941]: E1130 08:45:05.284108 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="984773df-2a8c-4db3-a97e-993d3e6985fc" containerName="neutron-metadata-openstack-openstack-cell1" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.284131 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="984773df-2a8c-4db3-a97e-993d3e6985fc" containerName="neutron-metadata-openstack-openstack-cell1" Nov 30 08:45:05 crc kubenswrapper[4941]: E1130 08:45:05.284155 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152a5a57-c99f-42f3-8050-101bcfc1f2ae" containerName="collect-profiles" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.284161 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="152a5a57-c99f-42f3-8050-101bcfc1f2ae" containerName="collect-profiles" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.284450 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="152a5a57-c99f-42f3-8050-101bcfc1f2ae" containerName="collect-profiles" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.284473 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="984773df-2a8c-4db3-a97e-993d3e6985fc" containerName="neutron-metadata-openstack-openstack-cell1" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.285252 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-fmfhx"] Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.285365 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-fmfhx" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.307996 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.308394 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.308574 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.308802 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.319537 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.355192 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.355257 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-ceph\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.355318 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-ssh-key\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.355352 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jw58h\" (UniqueName: \"kubernetes.io/projected/c6e65b7b-aabd-4898-b48f-90a421e836a5-kube-api-access-jw58h\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.355425 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.355531 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-inventory\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx" Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.458171 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-inventory\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.458205 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.458241 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-ceph\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.458313 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jw58h\" (UniqueName: \"kubernetes.io/projected/c6e65b7b-aabd-4898-b48f-90a421e836a5-kube-api-access-jw58h\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.458357 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-ssh-key\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.465286 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-ceph\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.470043 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.470267 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-ssh-key\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.470824 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-inventory\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.471015 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.489654 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jw58h\" (UniqueName: \"kubernetes.io/projected/c6e65b7b-aabd-4898-b48f-90a421e836a5-kube-api-access-jw58h\") pod \"libvirt-openstack-openstack-cell1-fmfhx\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") " pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.537925 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="616a8168-57aa-4b46-b33d-9c09e769c0e4" path="/var/lib/kubelet/pods/616a8168-57aa-4b46-b33d-9c09e769c0e4/volumes"
Nov 30 08:45:05 crc kubenswrapper[4941]: I1130 08:45:05.622209 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:45:06 crc kubenswrapper[4941]: I1130 08:45:06.247382 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-fmfhx"]
Nov 30 08:45:07 crc kubenswrapper[4941]: I1130 08:45:07.144994 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-fmfhx" event={"ID":"c6e65b7b-aabd-4898-b48f-90a421e836a5","Type":"ContainerStarted","Data":"06f8ebfe010f8c95515a176e0c4a369d872ade4cbbb379e24beb27071b21be6b"}
Nov 30 08:45:08 crc kubenswrapper[4941]: I1130 08:45:08.161114 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-fmfhx" event={"ID":"c6e65b7b-aabd-4898-b48f-90a421e836a5","Type":"ContainerStarted","Data":"f90ffb8077c58bf5d42dfba527ac33210e1a0609f7b685004efe5d1c7d9e80b2"}
Nov 30 08:45:08 crc kubenswrapper[4941]: I1130 08:45:08.205187 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-fmfhx" podStartSLOduration=2.405311343 podStartE2EDuration="3.205163007s" podCreationTimestamp="2025-11-30 08:45:05 +0000 UTC" firstStartedPulling="2025-11-30 08:45:06.266587095 +0000 UTC m=+7127.034758704" lastFinishedPulling="2025-11-30 08:45:07.066438759 +0000 UTC m=+7127.834610368" observedRunningTime="2025-11-30 08:45:08.192219518 +0000 UTC m=+7128.960391157" watchObservedRunningTime="2025-11-30 08:45:08.205163007 +0000 UTC m=+7128.973334616"
Nov 30 08:45:52 crc kubenswrapper[4941]: I1130 08:45:52.556656 4941 scope.go:117] "RemoveContainer" containerID="4ab5613d7584bc7779115f37e6d3e25ee552a5e462b8d63120eb728fa7bbec88"
Nov 30 08:46:02 crc kubenswrapper[4941]: I1130 08:46:02.978243 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:46:02 crc kubenswrapper[4941]: I1130 08:46:02.979260 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:46:32 crc kubenswrapper[4941]: I1130 08:46:32.979372 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:46:32 crc kubenswrapper[4941]: I1130 08:46:32.980831 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:47:02 crc kubenswrapper[4941]: I1130 08:47:02.978563 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 08:47:02 crc kubenswrapper[4941]: I1130 08:47:02.979656 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 08:47:02 crc kubenswrapper[4941]: I1130 08:47:02.979741 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 08:47:02 crc kubenswrapper[4941]: I1130 08:47:02.981197 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"83c532f6891919fbe101ca53964d4545bf26f192c23b11204d7a74e87bf589c4"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 08:47:02 crc kubenswrapper[4941]: I1130 08:47:02.981359 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://83c532f6891919fbe101ca53964d4545bf26f192c23b11204d7a74e87bf589c4" gracePeriod=600
Nov 30 08:47:03 crc kubenswrapper[4941]: I1130 08:47:03.729925 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="83c532f6891919fbe101ca53964d4545bf26f192c23b11204d7a74e87bf589c4" exitCode=0
Nov 30 08:47:03 crc kubenswrapper[4941]: I1130 08:47:03.729978 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"83c532f6891919fbe101ca53964d4545bf26f192c23b11204d7a74e87bf589c4"}
event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"83c532f6891919fbe101ca53964d4545bf26f192c23b11204d7a74e87bf589c4"} Nov 30 08:47:03 crc kubenswrapper[4941]: I1130 08:47:03.730729 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf"} Nov 30 08:47:03 crc kubenswrapper[4941]: I1130 08:47:03.730759 4941 scope.go:117] "RemoveContainer" containerID="b20f0fe3e60ce813728cef29d544129178d47d51a9b7e4112b1c8f0f1631d499" Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.134502 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-k4hmx"] Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.145684 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.163554 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k4hmx"] Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.308648 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f171062f-71ae-4475-8efd-ec79ac2042ec-catalog-content\") pod \"redhat-operators-k4hmx\" (UID: \"f171062f-71ae-4475-8efd-ec79ac2042ec\") " pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.308736 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r86f\" (UniqueName: \"kubernetes.io/projected/f171062f-71ae-4475-8efd-ec79ac2042ec-kube-api-access-4r86f\") pod \"redhat-operators-k4hmx\" (UID: \"f171062f-71ae-4475-8efd-ec79ac2042ec\") " pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.308860 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f171062f-71ae-4475-8efd-ec79ac2042ec-utilities\") pod \"redhat-operators-k4hmx\" (UID: \"f171062f-71ae-4475-8efd-ec79ac2042ec\") " pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.410812 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f171062f-71ae-4475-8efd-ec79ac2042ec-utilities\") pod \"redhat-operators-k4hmx\" (UID: \"f171062f-71ae-4475-8efd-ec79ac2042ec\") " pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.410924 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f171062f-71ae-4475-8efd-ec79ac2042ec-catalog-content\") pod \"redhat-operators-k4hmx\" (UID: \"f171062f-71ae-4475-8efd-ec79ac2042ec\") " pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.410967 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r86f\" (UniqueName: \"kubernetes.io/projected/f171062f-71ae-4475-8efd-ec79ac2042ec-kube-api-access-4r86f\") pod \"redhat-operators-k4hmx\" (UID: \"f171062f-71ae-4475-8efd-ec79ac2042ec\") " 
pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.411625 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f171062f-71ae-4475-8efd-ec79ac2042ec-utilities\") pod \"redhat-operators-k4hmx\" (UID: \"f171062f-71ae-4475-8efd-ec79ac2042ec\") " pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.411726 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f171062f-71ae-4475-8efd-ec79ac2042ec-catalog-content\") pod \"redhat-operators-k4hmx\" (UID: \"f171062f-71ae-4475-8efd-ec79ac2042ec\") " pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.437007 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r86f\" (UniqueName: \"kubernetes.io/projected/f171062f-71ae-4475-8efd-ec79ac2042ec-kube-api-access-4r86f\") pod \"redhat-operators-k4hmx\" (UID: \"f171062f-71ae-4475-8efd-ec79ac2042ec\") " pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:10 crc kubenswrapper[4941]: I1130 08:49:10.473223 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:11 crc kubenswrapper[4941]: I1130 08:49:11.064943 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k4hmx"] Nov 30 08:49:11 crc kubenswrapper[4941]: I1130 08:49:11.170941 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k4hmx" event={"ID":"f171062f-71ae-4475-8efd-ec79ac2042ec","Type":"ContainerStarted","Data":"732c5fbee4cc568d2f3b868edb67ea9821ac94abd9f67a897e3140dc6e396afb"} Nov 30 08:49:12 crc kubenswrapper[4941]: I1130 08:49:12.185995 4941 generic.go:334] "Generic (PLEG): container finished" podID="f171062f-71ae-4475-8efd-ec79ac2042ec" containerID="19f798df3f81594f59b80e567c79a7dac1863dd095c0cc3eee5c57949ef6dcd1" exitCode=0 Nov 30 08:49:12 crc kubenswrapper[4941]: I1130 08:49:12.186067 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k4hmx" event={"ID":"f171062f-71ae-4475-8efd-ec79ac2042ec","Type":"ContainerDied","Data":"19f798df3f81594f59b80e567c79a7dac1863dd095c0cc3eee5c57949ef6dcd1"} Nov 30 08:49:12 crc kubenswrapper[4941]: I1130 08:49:12.190590 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 08:49:13 crc kubenswrapper[4941]: I1130 08:49:13.211029 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k4hmx" event={"ID":"f171062f-71ae-4475-8efd-ec79ac2042ec","Type":"ContainerStarted","Data":"624d98164cea2a08a8a0989cb20073772a7c80bcccf0dff714d0c08c8f414066"} Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.109227 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dlzt6"] Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.115259 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.130301 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dlzt6"] Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.164082 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9567424-77e7-494a-b2dc-baeb30f5fcf5-utilities\") pod \"redhat-marketplace-dlzt6\" (UID: \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\") " pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.164371 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pqwt\" (UniqueName: \"kubernetes.io/projected/f9567424-77e7-494a-b2dc-baeb30f5fcf5-kube-api-access-6pqwt\") pod \"redhat-marketplace-dlzt6\" (UID: \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\") " pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.164638 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9567424-77e7-494a-b2dc-baeb30f5fcf5-catalog-content\") pod \"redhat-marketplace-dlzt6\" (UID: \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\") " pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.271227 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pqwt\" (UniqueName: \"kubernetes.io/projected/f9567424-77e7-494a-b2dc-baeb30f5fcf5-kube-api-access-6pqwt\") pod \"redhat-marketplace-dlzt6\" (UID: \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\") " pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.271428 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9567424-77e7-494a-b2dc-baeb30f5fcf5-catalog-content\") pod \"redhat-marketplace-dlzt6\" (UID: \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\") " pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.271514 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9567424-77e7-494a-b2dc-baeb30f5fcf5-utilities\") pod \"redhat-marketplace-dlzt6\" (UID: \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\") " pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.272569 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9567424-77e7-494a-b2dc-baeb30f5fcf5-catalog-content\") pod \"redhat-marketplace-dlzt6\" (UID: \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\") " pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.273045 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9567424-77e7-494a-b2dc-baeb30f5fcf5-utilities\") pod \"redhat-marketplace-dlzt6\" (UID: \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\") " pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.309297 4941 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-6pqwt\" (UniqueName: \"kubernetes.io/projected/f9567424-77e7-494a-b2dc-baeb30f5fcf5-kube-api-access-6pqwt\") pod \"redhat-marketplace-dlzt6\" (UID: \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\") " pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:15 crc kubenswrapper[4941]: I1130 08:49:15.442446 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:16 crc kubenswrapper[4941]: I1130 08:49:16.053133 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dlzt6"] Nov 30 08:49:16 crc kubenswrapper[4941]: W1130 08:49:16.055166 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9567424_77e7_494a_b2dc_baeb30f5fcf5.slice/crio-7babc46a6c54b7e821e813bd091537eb1cb5ca6956774e716008212c61ab0372 WatchSource:0}: Error finding container 7babc46a6c54b7e821e813bd091537eb1cb5ca6956774e716008212c61ab0372: Status 404 returned error can't find the container with id 7babc46a6c54b7e821e813bd091537eb1cb5ca6956774e716008212c61ab0372 Nov 30 08:49:16 crc kubenswrapper[4941]: I1130 08:49:16.290503 4941 generic.go:334] "Generic (PLEG): container finished" podID="f171062f-71ae-4475-8efd-ec79ac2042ec" containerID="624d98164cea2a08a8a0989cb20073772a7c80bcccf0dff714d0c08c8f414066" exitCode=0 Nov 30 08:49:16 crc kubenswrapper[4941]: I1130 08:49:16.290584 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k4hmx" event={"ID":"f171062f-71ae-4475-8efd-ec79ac2042ec","Type":"ContainerDied","Data":"624d98164cea2a08a8a0989cb20073772a7c80bcccf0dff714d0c08c8f414066"} Nov 30 08:49:16 crc kubenswrapper[4941]: I1130 08:49:16.311519 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dlzt6" event={"ID":"f9567424-77e7-494a-b2dc-baeb30f5fcf5","Type":"ContainerStarted","Data":"7babc46a6c54b7e821e813bd091537eb1cb5ca6956774e716008212c61ab0372"} Nov 30 08:49:17 crc kubenswrapper[4941]: I1130 08:49:17.329493 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k4hmx" event={"ID":"f171062f-71ae-4475-8efd-ec79ac2042ec","Type":"ContainerStarted","Data":"908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85"} Nov 30 08:49:17 crc kubenswrapper[4941]: I1130 08:49:17.333643 4941 generic.go:334] "Generic (PLEG): container finished" podID="f9567424-77e7-494a-b2dc-baeb30f5fcf5" containerID="8451921ecf2cd6bdfe6a06b09c793baf6550f631e4fea407596062412b7a2682" exitCode=0 Nov 30 08:49:17 crc kubenswrapper[4941]: I1130 08:49:17.333736 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dlzt6" event={"ID":"f9567424-77e7-494a-b2dc-baeb30f5fcf5","Type":"ContainerDied","Data":"8451921ecf2cd6bdfe6a06b09c793baf6550f631e4fea407596062412b7a2682"} Nov 30 08:49:17 crc kubenswrapper[4941]: I1130 08:49:17.374641 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-k4hmx" podStartSLOduration=2.786274306 podStartE2EDuration="7.37462326s" podCreationTimestamp="2025-11-30 08:49:10 +0000 UTC" firstStartedPulling="2025-11-30 08:49:12.190261296 +0000 UTC m=+7372.958432915" lastFinishedPulling="2025-11-30 08:49:16.77861025 +0000 UTC m=+7377.546781869" observedRunningTime="2025-11-30 08:49:17.367716766 +0000 UTC m=+7378.135888405" 
watchObservedRunningTime="2025-11-30 08:49:17.37462326 +0000 UTC m=+7378.142794869" Nov 30 08:49:19 crc kubenswrapper[4941]: I1130 08:49:19.360772 4941 generic.go:334] "Generic (PLEG): container finished" podID="f9567424-77e7-494a-b2dc-baeb30f5fcf5" containerID="af62b604cab430fe7743105f71b279723e9d3d9cb4d09d8709bbfcc729401740" exitCode=0 Nov 30 08:49:19 crc kubenswrapper[4941]: I1130 08:49:19.360838 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dlzt6" event={"ID":"f9567424-77e7-494a-b2dc-baeb30f5fcf5","Type":"ContainerDied","Data":"af62b604cab430fe7743105f71b279723e9d3d9cb4d09d8709bbfcc729401740"} Nov 30 08:49:20 crc kubenswrapper[4941]: I1130 08:49:20.389811 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dlzt6" event={"ID":"f9567424-77e7-494a-b2dc-baeb30f5fcf5","Type":"ContainerStarted","Data":"e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21"} Nov 30 08:49:20 crc kubenswrapper[4941]: I1130 08:49:20.415559 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dlzt6" podStartSLOduration=2.906571069 podStartE2EDuration="5.415536069s" podCreationTimestamp="2025-11-30 08:49:15 +0000 UTC" firstStartedPulling="2025-11-30 08:49:17.337595545 +0000 UTC m=+7378.105767154" lastFinishedPulling="2025-11-30 08:49:19.846560535 +0000 UTC m=+7380.614732154" observedRunningTime="2025-11-30 08:49:20.411366701 +0000 UTC m=+7381.179538310" watchObservedRunningTime="2025-11-30 08:49:20.415536069 +0000 UTC m=+7381.183707688" Nov 30 08:49:20 crc kubenswrapper[4941]: I1130 08:49:20.474006 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:20 crc kubenswrapper[4941]: I1130 08:49:20.474082 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:21 crc kubenswrapper[4941]: I1130 08:49:21.538692 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-k4hmx" podUID="f171062f-71ae-4475-8efd-ec79ac2042ec" containerName="registry-server" probeResult="failure" output=< Nov 30 08:49:21 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s Nov 30 08:49:21 crc kubenswrapper[4941]: > Nov 30 08:49:25 crc kubenswrapper[4941]: I1130 08:49:25.442628 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:25 crc kubenswrapper[4941]: I1130 08:49:25.443209 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:25 crc kubenswrapper[4941]: I1130 08:49:25.505440 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:26 crc kubenswrapper[4941]: I1130 08:49:26.535181 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:27 crc kubenswrapper[4941]: I1130 08:49:27.932121 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dlzt6"] Nov 30 08:49:28 crc kubenswrapper[4941]: I1130 08:49:28.480928 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dlzt6" 
podUID="f9567424-77e7-494a-b2dc-baeb30f5fcf5" containerName="registry-server" containerID="cri-o://e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21" gracePeriod=2 Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.005720 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.159513 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pqwt\" (UniqueName: \"kubernetes.io/projected/f9567424-77e7-494a-b2dc-baeb30f5fcf5-kube-api-access-6pqwt\") pod \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\" (UID: \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\") " Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.159941 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9567424-77e7-494a-b2dc-baeb30f5fcf5-catalog-content\") pod \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\" (UID: \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\") " Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.160093 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9567424-77e7-494a-b2dc-baeb30f5fcf5-utilities\") pod \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\" (UID: \"f9567424-77e7-494a-b2dc-baeb30f5fcf5\") " Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.161282 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9567424-77e7-494a-b2dc-baeb30f5fcf5-utilities" (OuterVolumeSpecName: "utilities") pod "f9567424-77e7-494a-b2dc-baeb30f5fcf5" (UID: "f9567424-77e7-494a-b2dc-baeb30f5fcf5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.168796 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9567424-77e7-494a-b2dc-baeb30f5fcf5-kube-api-access-6pqwt" (OuterVolumeSpecName: "kube-api-access-6pqwt") pod "f9567424-77e7-494a-b2dc-baeb30f5fcf5" (UID: "f9567424-77e7-494a-b2dc-baeb30f5fcf5"). InnerVolumeSpecName "kube-api-access-6pqwt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.179915 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9567424-77e7-494a-b2dc-baeb30f5fcf5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9567424-77e7-494a-b2dc-baeb30f5fcf5" (UID: "f9567424-77e7-494a-b2dc-baeb30f5fcf5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.263280 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pqwt\" (UniqueName: \"kubernetes.io/projected/f9567424-77e7-494a-b2dc-baeb30f5fcf5-kube-api-access-6pqwt\") on node \"crc\" DevicePath \"\"" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.263346 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9567424-77e7-494a-b2dc-baeb30f5fcf5-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.263358 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9567424-77e7-494a-b2dc-baeb30f5fcf5-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.500403 4941 generic.go:334] "Generic (PLEG): container finished" podID="f9567424-77e7-494a-b2dc-baeb30f5fcf5" containerID="e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21" exitCode=0 Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.500507 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dlzt6" event={"ID":"f9567424-77e7-494a-b2dc-baeb30f5fcf5","Type":"ContainerDied","Data":"e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21"} Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.500535 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dlzt6" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.500555 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dlzt6" event={"ID":"f9567424-77e7-494a-b2dc-baeb30f5fcf5","Type":"ContainerDied","Data":"7babc46a6c54b7e821e813bd091537eb1cb5ca6956774e716008212c61ab0372"} Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.500593 4941 scope.go:117] "RemoveContainer" containerID="e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.527875 4941 scope.go:117] "RemoveContainer" containerID="af62b604cab430fe7743105f71b279723e9d3d9cb4d09d8709bbfcc729401740" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.554897 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dlzt6"] Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.556107 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dlzt6"] Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.566592 4941 scope.go:117] "RemoveContainer" containerID="8451921ecf2cd6bdfe6a06b09c793baf6550f631e4fea407596062412b7a2682" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.600848 4941 scope.go:117] "RemoveContainer" containerID="e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21" Nov 30 08:49:29 crc kubenswrapper[4941]: E1130 08:49:29.601234 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21\": container with ID starting with e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21 not found: ID does not exist" containerID="e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.601266 4941 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21"} err="failed to get container status \"e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21\": rpc error: code = NotFound desc = could not find container \"e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21\": container with ID starting with e626c79c10280f2f3d1c16dede236b2c0da46eb4c7a365d5e0e0b4e9653e1b21 not found: ID does not exist" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.601290 4941 scope.go:117] "RemoveContainer" containerID="af62b604cab430fe7743105f71b279723e9d3d9cb4d09d8709bbfcc729401740" Nov 30 08:49:29 crc kubenswrapper[4941]: E1130 08:49:29.601894 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af62b604cab430fe7743105f71b279723e9d3d9cb4d09d8709bbfcc729401740\": container with ID starting with af62b604cab430fe7743105f71b279723e9d3d9cb4d09d8709bbfcc729401740 not found: ID does not exist" containerID="af62b604cab430fe7743105f71b279723e9d3d9cb4d09d8709bbfcc729401740" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.601951 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af62b604cab430fe7743105f71b279723e9d3d9cb4d09d8709bbfcc729401740"} err="failed to get container status \"af62b604cab430fe7743105f71b279723e9d3d9cb4d09d8709bbfcc729401740\": rpc error: code = NotFound desc = could not find container \"af62b604cab430fe7743105f71b279723e9d3d9cb4d09d8709bbfcc729401740\": container with ID starting with af62b604cab430fe7743105f71b279723e9d3d9cb4d09d8709bbfcc729401740 not found: ID does not exist" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.601971 4941 scope.go:117] "RemoveContainer" containerID="8451921ecf2cd6bdfe6a06b09c793baf6550f631e4fea407596062412b7a2682" Nov 30 08:49:29 crc kubenswrapper[4941]: E1130 08:49:29.602183 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8451921ecf2cd6bdfe6a06b09c793baf6550f631e4fea407596062412b7a2682\": container with ID starting with 8451921ecf2cd6bdfe6a06b09c793baf6550f631e4fea407596062412b7a2682 not found: ID does not exist" containerID="8451921ecf2cd6bdfe6a06b09c793baf6550f631e4fea407596062412b7a2682" Nov 30 08:49:29 crc kubenswrapper[4941]: I1130 08:49:29.602199 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8451921ecf2cd6bdfe6a06b09c793baf6550f631e4fea407596062412b7a2682"} err="failed to get container status \"8451921ecf2cd6bdfe6a06b09c793baf6550f631e4fea407596062412b7a2682\": rpc error: code = NotFound desc = could not find container \"8451921ecf2cd6bdfe6a06b09c793baf6550f631e4fea407596062412b7a2682\": container with ID starting with 8451921ecf2cd6bdfe6a06b09c793baf6550f631e4fea407596062412b7a2682 not found: ID does not exist" Nov 30 08:49:29 crc kubenswrapper[4941]: E1130 08:49:29.685080 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9567424_77e7_494a_b2dc_baeb30f5fcf5.slice\": RecentStats: unable to find data in memory cache]" Nov 30 08:49:30 crc kubenswrapper[4941]: I1130 08:49:30.547520 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:30 crc 
kubenswrapper[4941]: I1130 08:49:30.651558 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:31 crc kubenswrapper[4941]: I1130 08:49:31.537881 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9567424-77e7-494a-b2dc-baeb30f5fcf5" path="/var/lib/kubelet/pods/f9567424-77e7-494a-b2dc-baeb30f5fcf5/volumes" Nov 30 08:49:32 crc kubenswrapper[4941]: I1130 08:49:32.315835 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k4hmx"] Nov 30 08:49:32 crc kubenswrapper[4941]: I1130 08:49:32.537415 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-k4hmx" podUID="f171062f-71ae-4475-8efd-ec79ac2042ec" containerName="registry-server" containerID="cri-o://908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85" gracePeriod=2 Nov 30 08:49:32 crc kubenswrapper[4941]: I1130 08:49:32.978854 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:49:32 crc kubenswrapper[4941]: I1130 08:49:32.978936 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.069837 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.176523 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r86f\" (UniqueName: \"kubernetes.io/projected/f171062f-71ae-4475-8efd-ec79ac2042ec-kube-api-access-4r86f\") pod \"f171062f-71ae-4475-8efd-ec79ac2042ec\" (UID: \"f171062f-71ae-4475-8efd-ec79ac2042ec\") " Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.176813 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f171062f-71ae-4475-8efd-ec79ac2042ec-utilities\") pod \"f171062f-71ae-4475-8efd-ec79ac2042ec\" (UID: \"f171062f-71ae-4475-8efd-ec79ac2042ec\") " Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.176847 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f171062f-71ae-4475-8efd-ec79ac2042ec-catalog-content\") pod \"f171062f-71ae-4475-8efd-ec79ac2042ec\" (UID: \"f171062f-71ae-4475-8efd-ec79ac2042ec\") " Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.177639 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f171062f-71ae-4475-8efd-ec79ac2042ec-utilities" (OuterVolumeSpecName: "utilities") pod "f171062f-71ae-4475-8efd-ec79ac2042ec" (UID: "f171062f-71ae-4475-8efd-ec79ac2042ec"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.183728 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f171062f-71ae-4475-8efd-ec79ac2042ec-kube-api-access-4r86f" (OuterVolumeSpecName: "kube-api-access-4r86f") pod "f171062f-71ae-4475-8efd-ec79ac2042ec" (UID: "f171062f-71ae-4475-8efd-ec79ac2042ec"). InnerVolumeSpecName "kube-api-access-4r86f". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.284607 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f171062f-71ae-4475-8efd-ec79ac2042ec-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.285028 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r86f\" (UniqueName: \"kubernetes.io/projected/f171062f-71ae-4475-8efd-ec79ac2042ec-kube-api-access-4r86f\") on node \"crc\" DevicePath \"\"" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.317133 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f171062f-71ae-4475-8efd-ec79ac2042ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f171062f-71ae-4475-8efd-ec79ac2042ec" (UID: "f171062f-71ae-4475-8efd-ec79ac2042ec"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.387201 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f171062f-71ae-4475-8efd-ec79ac2042ec-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.551769 4941 generic.go:334] "Generic (PLEG): container finished" podID="f171062f-71ae-4475-8efd-ec79ac2042ec" containerID="908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85" exitCode=0 Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.551818 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k4hmx" event={"ID":"f171062f-71ae-4475-8efd-ec79ac2042ec","Type":"ContainerDied","Data":"908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85"} Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.551847 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k4hmx" event={"ID":"f171062f-71ae-4475-8efd-ec79ac2042ec","Type":"ContainerDied","Data":"732c5fbee4cc568d2f3b868edb67ea9821ac94abd9f67a897e3140dc6e396afb"} Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.551867 4941 scope.go:117] "RemoveContainer" containerID="908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.551863 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k4hmx" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.583671 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k4hmx"] Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.585964 4941 scope.go:117] "RemoveContainer" containerID="624d98164cea2a08a8a0989cb20073772a7c80bcccf0dff714d0c08c8f414066" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.594718 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-k4hmx"] Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.623671 4941 scope.go:117] "RemoveContainer" containerID="19f798df3f81594f59b80e567c79a7dac1863dd095c0cc3eee5c57949ef6dcd1" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.666523 4941 scope.go:117] "RemoveContainer" containerID="908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85" Nov 30 08:49:33 crc kubenswrapper[4941]: E1130 08:49:33.667055 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85\": container with ID starting with 908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85 not found: ID does not exist" containerID="908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.667118 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85"} err="failed to get container status \"908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85\": rpc error: code = NotFound desc = could not find container \"908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85\": container with ID starting with 908e66ea0fbff9dfe8eef73bbe80514a18f2c73f73612ebd78178d7f0cc80a85 not found: ID does not exist" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.667158 4941 scope.go:117] "RemoveContainer" containerID="624d98164cea2a08a8a0989cb20073772a7c80bcccf0dff714d0c08c8f414066" Nov 30 08:49:33 crc kubenswrapper[4941]: E1130 08:49:33.667864 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"624d98164cea2a08a8a0989cb20073772a7c80bcccf0dff714d0c08c8f414066\": container with ID starting with 624d98164cea2a08a8a0989cb20073772a7c80bcccf0dff714d0c08c8f414066 not found: ID does not exist" containerID="624d98164cea2a08a8a0989cb20073772a7c80bcccf0dff714d0c08c8f414066" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.667911 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"624d98164cea2a08a8a0989cb20073772a7c80bcccf0dff714d0c08c8f414066"} err="failed to get container status \"624d98164cea2a08a8a0989cb20073772a7c80bcccf0dff714d0c08c8f414066\": rpc error: code = NotFound desc = could not find container \"624d98164cea2a08a8a0989cb20073772a7c80bcccf0dff714d0c08c8f414066\": container with ID starting with 624d98164cea2a08a8a0989cb20073772a7c80bcccf0dff714d0c08c8f414066 not found: ID does not exist" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.667950 4941 scope.go:117] "RemoveContainer" containerID="19f798df3f81594f59b80e567c79a7dac1863dd095c0cc3eee5c57949ef6dcd1" Nov 30 08:49:33 crc kubenswrapper[4941]: E1130 08:49:33.668361 4941 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"19f798df3f81594f59b80e567c79a7dac1863dd095c0cc3eee5c57949ef6dcd1\": container with ID starting with 19f798df3f81594f59b80e567c79a7dac1863dd095c0cc3eee5c57949ef6dcd1 not found: ID does not exist" containerID="19f798df3f81594f59b80e567c79a7dac1863dd095c0cc3eee5c57949ef6dcd1" Nov 30 08:49:33 crc kubenswrapper[4941]: I1130 08:49:33.668404 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19f798df3f81594f59b80e567c79a7dac1863dd095c0cc3eee5c57949ef6dcd1"} err="failed to get container status \"19f798df3f81594f59b80e567c79a7dac1863dd095c0cc3eee5c57949ef6dcd1\": rpc error: code = NotFound desc = could not find container \"19f798df3f81594f59b80e567c79a7dac1863dd095c0cc3eee5c57949ef6dcd1\": container with ID starting with 19f798df3f81594f59b80e567c79a7dac1863dd095c0cc3eee5c57949ef6dcd1 not found: ID does not exist" Nov 30 08:49:35 crc kubenswrapper[4941]: I1130 08:49:35.534752 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f171062f-71ae-4475-8efd-ec79ac2042ec" path="/var/lib/kubelet/pods/f171062f-71ae-4475-8efd-ec79ac2042ec/volumes" Nov 30 08:50:01 crc kubenswrapper[4941]: I1130 08:50:01.941538 4941 generic.go:334] "Generic (PLEG): container finished" podID="c6e65b7b-aabd-4898-b48f-90a421e836a5" containerID="f90ffb8077c58bf5d42dfba527ac33210e1a0609f7b685004efe5d1c7d9e80b2" exitCode=0 Nov 30 08:50:01 crc kubenswrapper[4941]: I1130 08:50:01.941652 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-fmfhx" event={"ID":"c6e65b7b-aabd-4898-b48f-90a421e836a5","Type":"ContainerDied","Data":"f90ffb8077c58bf5d42dfba527ac33210e1a0609f7b685004efe5d1c7d9e80b2"} Nov 30 08:50:02 crc kubenswrapper[4941]: I1130 08:50:02.980832 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:50:02 crc kubenswrapper[4941]: I1130 08:50:02.981507 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.543913 4941 util.go:48] "No ready sandbox for pod can be found. 
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.668401 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-ceph\") pod \"c6e65b7b-aabd-4898-b48f-90a421e836a5\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") "
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.669934 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-libvirt-secret-0\") pod \"c6e65b7b-aabd-4898-b48f-90a421e836a5\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") "
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.670175 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-inventory\") pod \"c6e65b7b-aabd-4898-b48f-90a421e836a5\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") "
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.670260 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-libvirt-combined-ca-bundle\") pod \"c6e65b7b-aabd-4898-b48f-90a421e836a5\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") "
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.670739 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jw58h\" (UniqueName: \"kubernetes.io/projected/c6e65b7b-aabd-4898-b48f-90a421e836a5-kube-api-access-jw58h\") pod \"c6e65b7b-aabd-4898-b48f-90a421e836a5\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") "
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.671407 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-ssh-key\") pod \"c6e65b7b-aabd-4898-b48f-90a421e836a5\" (UID: \"c6e65b7b-aabd-4898-b48f-90a421e836a5\") "
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.676969 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "c6e65b7b-aabd-4898-b48f-90a421e836a5" (UID: "c6e65b7b-aabd-4898-b48f-90a421e836a5"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.678799 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6e65b7b-aabd-4898-b48f-90a421e836a5-kube-api-access-jw58h" (OuterVolumeSpecName: "kube-api-access-jw58h") pod "c6e65b7b-aabd-4898-b48f-90a421e836a5" (UID: "c6e65b7b-aabd-4898-b48f-90a421e836a5"). InnerVolumeSpecName "kube-api-access-jw58h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.679362 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-ceph" (OuterVolumeSpecName: "ceph") pod "c6e65b7b-aabd-4898-b48f-90a421e836a5" (UID: "c6e65b7b-aabd-4898-b48f-90a421e836a5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.704469 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-inventory" (OuterVolumeSpecName: "inventory") pod "c6e65b7b-aabd-4898-b48f-90a421e836a5" (UID: "c6e65b7b-aabd-4898-b48f-90a421e836a5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.713242 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "c6e65b7b-aabd-4898-b48f-90a421e836a5" (UID: "c6e65b7b-aabd-4898-b48f-90a421e836a5"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.713556 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c6e65b7b-aabd-4898-b48f-90a421e836a5" (UID: "c6e65b7b-aabd-4898-b48f-90a421e836a5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.775502 4941 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.775541 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-inventory\") on node \"crc\" DevicePath \"\""
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.775553 4941 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.775564 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jw58h\" (UniqueName: \"kubernetes.io/projected/c6e65b7b-aabd-4898-b48f-90a421e836a5-kube-api-access-jw58h\") on node \"crc\" DevicePath \"\""
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.775575 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.775587 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c6e65b7b-aabd-4898-b48f-90a421e836a5-ceph\") on node \"crc\" DevicePath \"\""
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.986824 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-fmfhx" event={"ID":"c6e65b7b-aabd-4898-b48f-90a421e836a5","Type":"ContainerDied","Data":"06f8ebfe010f8c95515a176e0c4a369d872ade4cbbb379e24beb27071b21be6b"}
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.989889 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06f8ebfe010f8c95515a176e0c4a369d872ade4cbbb379e24beb27071b21be6b"
Nov 30 08:50:03 crc kubenswrapper[4941]: I1130 08:50:03.987356 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-fmfhx"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.103160 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-8ktj4"]
Nov 30 08:50:04 crc kubenswrapper[4941]: E1130 08:50:04.103842 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6e65b7b-aabd-4898-b48f-90a421e836a5" containerName="libvirt-openstack-openstack-cell1"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.103870 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6e65b7b-aabd-4898-b48f-90a421e836a5" containerName="libvirt-openstack-openstack-cell1"
Nov 30 08:50:04 crc kubenswrapper[4941]: E1130 08:50:04.103886 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9567424-77e7-494a-b2dc-baeb30f5fcf5" containerName="extract-content"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.103896 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9567424-77e7-494a-b2dc-baeb30f5fcf5" containerName="extract-content"
Nov 30 08:50:04 crc kubenswrapper[4941]: E1130 08:50:04.103921 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f171062f-71ae-4475-8efd-ec79ac2042ec" containerName="extract-content"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.103930 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f171062f-71ae-4475-8efd-ec79ac2042ec" containerName="extract-content"
Nov 30 08:50:04 crc kubenswrapper[4941]: E1130 08:50:04.103946 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9567424-77e7-494a-b2dc-baeb30f5fcf5" containerName="registry-server"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.103954 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9567424-77e7-494a-b2dc-baeb30f5fcf5" containerName="registry-server"
Nov 30 08:50:04 crc kubenswrapper[4941]: E1130 08:50:04.103976 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9567424-77e7-494a-b2dc-baeb30f5fcf5" containerName="extract-utilities"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.103985 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9567424-77e7-494a-b2dc-baeb30f5fcf5" containerName="extract-utilities"
Nov 30 08:50:04 crc kubenswrapper[4941]: E1130 08:50:04.103999 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f171062f-71ae-4475-8efd-ec79ac2042ec" containerName="registry-server"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.104008 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f171062f-71ae-4475-8efd-ec79ac2042ec" containerName="registry-server"
Nov 30 08:50:04 crc kubenswrapper[4941]: E1130 08:50:04.104049 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f171062f-71ae-4475-8efd-ec79ac2042ec" containerName="extract-utilities"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.104058 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f171062f-71ae-4475-8efd-ec79ac2042ec" containerName="extract-utilities"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.104356 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6e65b7b-aabd-4898-b48f-90a421e836a5" containerName="libvirt-openstack-openstack-cell1"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.104378 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9567424-77e7-494a-b2dc-baeb30f5fcf5" containerName="registry-server"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.104394 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f171062f-71ae-4475-8efd-ec79ac2042ec" containerName="registry-server"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.105483 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.115459 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.115817 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.115896 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.115928 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.116098 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.116479 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.116495 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.136956 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-8ktj4"]
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.194354 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.194443 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.194477 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-ceph\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.194518 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-inventory\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.194743 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.195023 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gr7l9\" (UniqueName: \"kubernetes.io/projected/a342b77c-f58f-499e-9671-a67cd80d9e3e-kube-api-access-gr7l9\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.195361 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.195665 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.195771 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.196089 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.196169 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.298447 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.298526 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.298554 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.298608 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.298635 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.298691 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.298726 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.298749 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-ceph\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.298790 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-inventory\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.298827 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.298876 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gr7l9\" (UniqueName: \"kubernetes.io/projected/a342b77c-f58f-499e-9671-a67cd80d9e3e-kube-api-access-gr7l9\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.300434 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.300686 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.304145 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-ceph\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.304833 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.305786 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.306417 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.306739 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-compute-config-1\") pod 
\"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.306795 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.307411 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.314158 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-inventory\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.330295 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gr7l9\" (UniqueName: \"kubernetes.io/projected/a342b77c-f58f-499e-9671-a67cd80d9e3e-kube-api-access-gr7l9\") pod \"nova-cell1-openstack-openstack-cell1-8ktj4\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") " pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:04 crc kubenswrapper[4941]: I1130 08:50:04.436352 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" Nov 30 08:50:05 crc kubenswrapper[4941]: I1130 08:50:05.135744 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-8ktj4"] Nov 30 08:50:06 crc kubenswrapper[4941]: I1130 08:50:06.011884 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" event={"ID":"a342b77c-f58f-499e-9671-a67cd80d9e3e","Type":"ContainerStarted","Data":"3006747ef5f30f8a48fe47ccc5a86ca2bdbcc54ab99f2edd5c44dfc30df497bf"} Nov 30 08:50:07 crc kubenswrapper[4941]: I1130 08:50:07.027860 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" event={"ID":"a342b77c-f58f-499e-9671-a67cd80d9e3e","Type":"ContainerStarted","Data":"6a2f23d19ddc2ffa50d749556741f3f66f6a5f1e8161fa73425f6bc2e576f6e2"} Nov 30 08:50:07 crc kubenswrapper[4941]: I1130 08:50:07.072595 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" podStartSLOduration=2.36258854 podStartE2EDuration="3.072568832s" podCreationTimestamp="2025-11-30 08:50:04 +0000 UTC" firstStartedPulling="2025-11-30 08:50:05.13852105 +0000 UTC m=+7425.906692669" lastFinishedPulling="2025-11-30 08:50:05.848501352 +0000 UTC m=+7426.616672961" observedRunningTime="2025-11-30 08:50:07.061020435 +0000 UTC m=+7427.829192064" watchObservedRunningTime="2025-11-30 08:50:07.072568832 +0000 UTC m=+7427.840740451" Nov 30 08:50:32 crc kubenswrapper[4941]: I1130 08:50:32.979502 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:50:32 crc kubenswrapper[4941]: I1130 08:50:32.980826 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:50:32 crc kubenswrapper[4941]: I1130 08:50:32.980922 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 08:50:32 crc kubenswrapper[4941]: I1130 08:50:32.982628 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 08:50:32 crc kubenswrapper[4941]: I1130 08:50:32.982757 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" gracePeriod=600 Nov 30 08:50:33 crc kubenswrapper[4941]: E1130 08:50:33.125481 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:50:33 crc kubenswrapper[4941]: I1130 08:50:33.388779 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" exitCode=0 Nov 30 08:50:33 crc kubenswrapper[4941]: I1130 08:50:33.388855 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf"} Nov 30 08:50:33 crc kubenswrapper[4941]: I1130 08:50:33.388961 4941 scope.go:117] "RemoveContainer" containerID="83c532f6891919fbe101ca53964d4545bf26f192c23b11204d7a74e87bf589c4" Nov 30 08:50:33 crc kubenswrapper[4941]: I1130 08:50:33.390175 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:50:33 crc kubenswrapper[4941]: E1130 08:50:33.390744 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:50:46 crc kubenswrapper[4941]: I1130 08:50:46.523501 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:50:46 crc kubenswrapper[4941]: E1130 08:50:46.524986 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:50:59 crc kubenswrapper[4941]: I1130 08:50:59.544818 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:50:59 crc kubenswrapper[4941]: E1130 08:50:59.547730 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:51:11 crc kubenswrapper[4941]: I1130 08:51:11.524016 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:51:11 crc kubenswrapper[4941]: E1130 08:51:11.525467 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:51:24 crc kubenswrapper[4941]: I1130 08:51:24.523793 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:51:24 crc kubenswrapper[4941]: E1130 08:51:24.525190 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:51:38 crc kubenswrapper[4941]: I1130 08:51:38.523864 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:51:38 crc kubenswrapper[4941]: E1130 08:51:38.525659 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:51:53 crc kubenswrapper[4941]: I1130 08:51:53.522526 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:51:53 crc kubenswrapper[4941]: E1130 08:51:53.523987 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:52:08 crc kubenswrapper[4941]: I1130 08:52:08.523176 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:52:08 crc kubenswrapper[4941]: E1130 08:52:08.524348 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:52:22 crc kubenswrapper[4941]: I1130 08:52:22.523425 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:52:22 crc kubenswrapper[4941]: E1130 08:52:22.525446 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" 
podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:52:37 crc kubenswrapper[4941]: I1130 08:52:37.522499 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:52:37 crc kubenswrapper[4941]: E1130 08:52:37.524017 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.522195 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:52:48 crc kubenswrapper[4941]: E1130 08:52:48.523524 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.677654 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-s7ld2"] Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.683587 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.697173 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s7ld2"] Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.764994 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-utilities\") pod \"community-operators-s7ld2\" (UID: \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\") " pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.765226 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-catalog-content\") pod \"community-operators-s7ld2\" (UID: \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\") " pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.765344 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6zsq\" (UniqueName: \"kubernetes.io/projected/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-kube-api-access-p6zsq\") pod \"community-operators-s7ld2\" (UID: \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\") " pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.867664 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6zsq\" (UniqueName: \"kubernetes.io/projected/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-kube-api-access-p6zsq\") pod \"community-operators-s7ld2\" (UID: \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\") " 
pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.867795 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-utilities\") pod \"community-operators-s7ld2\" (UID: \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\") " pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.867882 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-catalog-content\") pod \"community-operators-s7ld2\" (UID: \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\") " pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.868430 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-catalog-content\") pod \"community-operators-s7ld2\" (UID: \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\") " pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.868627 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-utilities\") pod \"community-operators-s7ld2\" (UID: \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\") " pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:48 crc kubenswrapper[4941]: I1130 08:52:48.887423 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6zsq\" (UniqueName: \"kubernetes.io/projected/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-kube-api-access-p6zsq\") pod \"community-operators-s7ld2\" (UID: \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\") " pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:49 crc kubenswrapper[4941]: I1130 08:52:49.010638 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:49 crc kubenswrapper[4941]: I1130 08:52:49.626803 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s7ld2"] Nov 30 08:52:50 crc kubenswrapper[4941]: I1130 08:52:50.258459 4941 generic.go:334] "Generic (PLEG): container finished" podID="fb3a386c-68aa-4e2e-b2e0-4166b61129c5" containerID="3d7a39acf82146d2860ba7186a5c4834bba6037d374b3cf1a38a91b0005377ac" exitCode=0 Nov 30 08:52:50 crc kubenswrapper[4941]: I1130 08:52:50.258538 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7ld2" event={"ID":"fb3a386c-68aa-4e2e-b2e0-4166b61129c5","Type":"ContainerDied","Data":"3d7a39acf82146d2860ba7186a5c4834bba6037d374b3cf1a38a91b0005377ac"} Nov 30 08:52:50 crc kubenswrapper[4941]: I1130 08:52:50.258601 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7ld2" event={"ID":"fb3a386c-68aa-4e2e-b2e0-4166b61129c5","Type":"ContainerStarted","Data":"1e580e109c28e9230c9276acbb28c20fbfca0b9fc6cd4a7dc20ff156d0fdb2bf"} Nov 30 08:52:52 crc kubenswrapper[4941]: I1130 08:52:52.289778 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7ld2" event={"ID":"fb3a386c-68aa-4e2e-b2e0-4166b61129c5","Type":"ContainerStarted","Data":"069227959bc1172cb53c0ba64bc420ad1e609d0b460406057b499608e6b778c1"} Nov 30 08:52:53 crc kubenswrapper[4941]: I1130 08:52:53.302312 4941 generic.go:334] "Generic (PLEG): container finished" podID="fb3a386c-68aa-4e2e-b2e0-4166b61129c5" containerID="069227959bc1172cb53c0ba64bc420ad1e609d0b460406057b499608e6b778c1" exitCode=0 Nov 30 08:52:53 crc kubenswrapper[4941]: I1130 08:52:53.302488 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7ld2" event={"ID":"fb3a386c-68aa-4e2e-b2e0-4166b61129c5","Type":"ContainerDied","Data":"069227959bc1172cb53c0ba64bc420ad1e609d0b460406057b499608e6b778c1"} Nov 30 08:52:54 crc kubenswrapper[4941]: I1130 08:52:54.318211 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7ld2" event={"ID":"fb3a386c-68aa-4e2e-b2e0-4166b61129c5","Type":"ContainerStarted","Data":"d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a"} Nov 30 08:52:59 crc kubenswrapper[4941]: I1130 08:52:59.011786 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:59 crc kubenswrapper[4941]: I1130 08:52:59.012595 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:59 crc kubenswrapper[4941]: I1130 08:52:59.076752 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-s7ld2" Nov 30 08:52:59 crc kubenswrapper[4941]: I1130 08:52:59.095791 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-s7ld2" podStartSLOduration=7.520858355 podStartE2EDuration="11.09577006s" podCreationTimestamp="2025-11-30 08:52:48 +0000 UTC" firstStartedPulling="2025-11-30 08:52:50.273648961 +0000 UTC m=+7591.041820590" lastFinishedPulling="2025-11-30 08:52:53.848560686 +0000 UTC m=+7594.616732295" observedRunningTime="2025-11-30 08:52:54.338797187 +0000 UTC m=+7595.106968806" watchObservedRunningTime="2025-11-30 08:52:59.09577006 
Nov 30 08:52:59 crc kubenswrapper[4941]: I1130 08:52:59.431317 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-s7ld2"
Nov 30 08:52:59 crc kubenswrapper[4941]: I1130 08:52:59.482483 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s7ld2"]
Nov 30 08:53:01 crc kubenswrapper[4941]: I1130 08:53:01.397643 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-s7ld2" podUID="fb3a386c-68aa-4e2e-b2e0-4166b61129c5" containerName="registry-server" containerID="cri-o://d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a" gracePeriod=2
Nov 30 08:53:01 crc kubenswrapper[4941]: I1130 08:53:01.946944 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s7ld2"
Nov 30 08:53:01 crc kubenswrapper[4941]: I1130 08:53:01.991966 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-catalog-content\") pod \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\" (UID: \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\") "
Nov 30 08:53:01 crc kubenswrapper[4941]: I1130 08:53:01.992592 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-utilities\") pod \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\" (UID: \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\") "
Nov 30 08:53:01 crc kubenswrapper[4941]: I1130 08:53:01.992714 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6zsq\" (UniqueName: \"kubernetes.io/projected/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-kube-api-access-p6zsq\") pod \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\" (UID: \"fb3a386c-68aa-4e2e-b2e0-4166b61129c5\") "
Nov 30 08:53:01 crc kubenswrapper[4941]: I1130 08:53:01.994161 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-utilities" (OuterVolumeSpecName: "utilities") pod "fb3a386c-68aa-4e2e-b2e0-4166b61129c5" (UID: "fb3a386c-68aa-4e2e-b2e0-4166b61129c5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.002115 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-kube-api-access-p6zsq" (OuterVolumeSpecName: "kube-api-access-p6zsq") pod "fb3a386c-68aa-4e2e-b2e0-4166b61129c5" (UID: "fb3a386c-68aa-4e2e-b2e0-4166b61129c5"). InnerVolumeSpecName "kube-api-access-p6zsq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.054266 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fb3a386c-68aa-4e2e-b2e0-4166b61129c5" (UID: "fb3a386c-68aa-4e2e-b2e0-4166b61129c5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.096348 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.096397 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.096413 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6zsq\" (UniqueName: \"kubernetes.io/projected/fb3a386c-68aa-4e2e-b2e0-4166b61129c5-kube-api-access-p6zsq\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.412912 4941 generic.go:334] "Generic (PLEG): container finished" podID="fb3a386c-68aa-4e2e-b2e0-4166b61129c5" containerID="d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a" exitCode=0
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.412975 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7ld2" event={"ID":"fb3a386c-68aa-4e2e-b2e0-4166b61129c5","Type":"ContainerDied","Data":"d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a"}
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.413013 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s7ld2" event={"ID":"fb3a386c-68aa-4e2e-b2e0-4166b61129c5","Type":"ContainerDied","Data":"1e580e109c28e9230c9276acbb28c20fbfca0b9fc6cd4a7dc20ff156d0fdb2bf"}
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.413048 4941 scope.go:117] "RemoveContainer" containerID="d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a"
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.413562 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s7ld2"
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.451279 4941 scope.go:117] "RemoveContainer" containerID="069227959bc1172cb53c0ba64bc420ad1e609d0b460406057b499608e6b778c1"
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.468597 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s7ld2"]
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.483037 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-s7ld2"]
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.493175 4941 scope.go:117] "RemoveContainer" containerID="3d7a39acf82146d2860ba7186a5c4834bba6037d374b3cf1a38a91b0005377ac"
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.556400 4941 scope.go:117] "RemoveContainer" containerID="d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a"
Nov 30 08:53:02 crc kubenswrapper[4941]: E1130 08:53:02.557170 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a\": container with ID starting with d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a not found: ID does not exist" containerID="d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a"
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.557264 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a"} err="failed to get container status \"d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a\": rpc error: code = NotFound desc = could not find container \"d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a\": container with ID starting with d5fd4579347b1911b6fd7d9ac5627a5e992e63f3059d67c66aba15967c936c3a not found: ID does not exist"
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.557307 4941 scope.go:117] "RemoveContainer" containerID="069227959bc1172cb53c0ba64bc420ad1e609d0b460406057b499608e6b778c1"
Nov 30 08:53:02 crc kubenswrapper[4941]: E1130 08:53:02.557897 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"069227959bc1172cb53c0ba64bc420ad1e609d0b460406057b499608e6b778c1\": container with ID starting with 069227959bc1172cb53c0ba64bc420ad1e609d0b460406057b499608e6b778c1 not found: ID does not exist" containerID="069227959bc1172cb53c0ba64bc420ad1e609d0b460406057b499608e6b778c1"
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.557960 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"069227959bc1172cb53c0ba64bc420ad1e609d0b460406057b499608e6b778c1"} err="failed to get container status \"069227959bc1172cb53c0ba64bc420ad1e609d0b460406057b499608e6b778c1\": rpc error: code = NotFound desc = could not find container \"069227959bc1172cb53c0ba64bc420ad1e609d0b460406057b499608e6b778c1\": container with ID starting with 069227959bc1172cb53c0ba64bc420ad1e609d0b460406057b499608e6b778c1 not found: ID does not exist"
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.558002 4941 scope.go:117] "RemoveContainer" containerID="3d7a39acf82146d2860ba7186a5c4834bba6037d374b3cf1a38a91b0005377ac"
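The RemoveContainer/ContainerStatus pairs above end in gRPC NotFound errors, and the kubelet simply moves on: once the pod sandbox is gone the containers were already removed, so deleting them again is a no-op. A minimal sketch of that idempotent-delete pattern, assuming a placeholder deleteFn rather than the kubelet's actual CRI client:

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer treats a gRPC NotFound from the runtime as "already
// removed" and reports success; any other error is surfaced. deleteFn is a
// stand-in for a CRI RemoveContainer call.
func removeContainer(id string, deleteFn func(string) error) error {
	err := deleteFn(id)
	if status.Code(err) == codes.NotFound {
		fmt.Printf("container %s already removed, nothing to do\n", id)
		return nil
	}
	return err // nil on success, real error otherwise
}

func main() {
	gone := func(string) error {
		return status.Error(codes.NotFound, "could not find container")
	}
	if err := removeContainer("d5fd4579", gone); err != nil {
		fmt.Println("unexpected:", err)
	}
}
```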
Nov 30 08:53:02 crc kubenswrapper[4941]: E1130 08:53:02.558579 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d7a39acf82146d2860ba7186a5c4834bba6037d374b3cf1a38a91b0005377ac\": container with ID starting with 3d7a39acf82146d2860ba7186a5c4834bba6037d374b3cf1a38a91b0005377ac not found: ID does not exist" containerID="3d7a39acf82146d2860ba7186a5c4834bba6037d374b3cf1a38a91b0005377ac"
Nov 30 08:53:02 crc kubenswrapper[4941]: I1130 08:53:02.558657 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d7a39acf82146d2860ba7186a5c4834bba6037d374b3cf1a38a91b0005377ac"} err="failed to get container status \"3d7a39acf82146d2860ba7186a5c4834bba6037d374b3cf1a38a91b0005377ac\": rpc error: code = NotFound desc = could not find container \"3d7a39acf82146d2860ba7186a5c4834bba6037d374b3cf1a38a91b0005377ac\": container with ID starting with 3d7a39acf82146d2860ba7186a5c4834bba6037d374b3cf1a38a91b0005377ac not found: ID does not exist"
Nov 30 08:53:03 crc kubenswrapper[4941]: I1130 08:53:03.521519 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf"
Nov 30 08:53:03 crc kubenswrapper[4941]: E1130 08:53:03.522028 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:53:03 crc kubenswrapper[4941]: I1130 08:53:03.533542 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb3a386c-68aa-4e2e-b2e0-4166b61129c5" path="/var/lib/kubelet/pods/fb3a386c-68aa-4e2e-b2e0-4166b61129c5/volumes"
Nov 30 08:53:15 crc kubenswrapper[4941]: I1130 08:53:15.522552 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf"
Nov 30 08:53:15 crc kubenswrapper[4941]: E1130 08:53:15.523634 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 08:53:25 crc kubenswrapper[4941]: I1130 08:53:25.771874 4941 generic.go:334] "Generic (PLEG): container finished" podID="a342b77c-f58f-499e-9671-a67cd80d9e3e" containerID="6a2f23d19ddc2ffa50d749556741f3f66f6a5f1e8161fa73425f6bc2e576f6e2" exitCode=0
Nov 30 08:53:25 crc kubenswrapper[4941]: I1130 08:53:25.773163 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" event={"ID":"a342b77c-f58f-499e-9671-a67cd80d9e3e","Type":"ContainerDied","Data":"6a2f23d19ddc2ffa50d749556741f3f66f6a5f1e8161fa73425f6bc2e576f6e2"}
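The recurring CrashLoopBackOff errors above show the kubelet refusing to restart the machine-config-daemon container until its backoff window expires; the "back-off 5m0s" in the message is the cap on that delay. A small sketch of the schedule, assuming a 10s base delay and doubling factor (only the 5m cap is taken from the log; the base and factor are assumptions matching commonly documented kubelet behaviour):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 10 * time.Second // assumed base delay
	const maxDelay = 5 * time.Minute // the "back-off 5m0s" cap from the message
	for restart := 1; restart <= 8; restart++ {
		fmt.Printf("restart %d: wait %v\n", restart, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay // 10s, 20s, 40s, ... then pinned at 5m0s
		}
	}
}
```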
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.294557 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.338010 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cells-global-config-1\") pod \"a342b77c-f58f-499e-9671-a67cd80d9e3e\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") "
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.338215 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-ssh-key\") pod \"a342b77c-f58f-499e-9671-a67cd80d9e3e\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") "
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.338245 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-compute-config-1\") pod \"a342b77c-f58f-499e-9671-a67cd80d9e3e\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") "
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.338275 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-inventory\") pod \"a342b77c-f58f-499e-9671-a67cd80d9e3e\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") "
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.338346 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gr7l9\" (UniqueName: \"kubernetes.io/projected/a342b77c-f58f-499e-9671-a67cd80d9e3e-kube-api-access-gr7l9\") pod \"a342b77c-f58f-499e-9671-a67cd80d9e3e\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") "
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.338420 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-combined-ca-bundle\") pod \"a342b77c-f58f-499e-9671-a67cd80d9e3e\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") "
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.338442 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-migration-ssh-key-1\") pod \"a342b77c-f58f-499e-9671-a67cd80d9e3e\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") "
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.338471 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-compute-config-0\") pod \"a342b77c-f58f-499e-9671-a67cd80d9e3e\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") "
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.338494 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cells-global-config-0\") pod \"a342b77c-f58f-499e-9671-a67cd80d9e3e\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") "
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.338571 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-migration-ssh-key-0\") pod \"a342b77c-f58f-499e-9671-a67cd80d9e3e\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") "
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.338658 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-ceph\") pod \"a342b77c-f58f-499e-9671-a67cd80d9e3e\" (UID: \"a342b77c-f58f-499e-9671-a67cd80d9e3e\") "
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.358024 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a342b77c-f58f-499e-9671-a67cd80d9e3e-kube-api-access-gr7l9" (OuterVolumeSpecName: "kube-api-access-gr7l9") pod "a342b77c-f58f-499e-9671-a67cd80d9e3e" (UID: "a342b77c-f58f-499e-9671-a67cd80d9e3e"). InnerVolumeSpecName "kube-api-access-gr7l9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.358925 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-ceph" (OuterVolumeSpecName: "ceph") pod "a342b77c-f58f-499e-9671-a67cd80d9e3e" (UID: "a342b77c-f58f-499e-9671-a67cd80d9e3e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.387732 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "a342b77c-f58f-499e-9671-a67cd80d9e3e" (UID: "a342b77c-f58f-499e-9671-a67cd80d9e3e"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.388064 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "a342b77c-f58f-499e-9671-a67cd80d9e3e" (UID: "a342b77c-f58f-499e-9671-a67cd80d9e3e"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.395133 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "a342b77c-f58f-499e-9671-a67cd80d9e3e" (UID: "a342b77c-f58f-499e-9671-a67cd80d9e3e"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.406104 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a342b77c-f58f-499e-9671-a67cd80d9e3e" (UID: "a342b77c-f58f-499e-9671-a67cd80d9e3e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.409505 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "a342b77c-f58f-499e-9671-a67cd80d9e3e" (UID: "a342b77c-f58f-499e-9671-a67cd80d9e3e"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.410862 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "a342b77c-f58f-499e-9671-a67cd80d9e3e" (UID: "a342b77c-f58f-499e-9671-a67cd80d9e3e"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.423768 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-inventory" (OuterVolumeSpecName: "inventory") pod "a342b77c-f58f-499e-9671-a67cd80d9e3e" (UID: "a342b77c-f58f-499e-9671-a67cd80d9e3e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.425204 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "a342b77c-f58f-499e-9671-a67cd80d9e3e" (UID: "a342b77c-f58f-499e-9671-a67cd80d9e3e"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.429813 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "a342b77c-f58f-499e-9671-a67cd80d9e3e" (UID: "a342b77c-f58f-499e-9671-a67cd80d9e3e"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.440876 4941 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.440914 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-ceph\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.440928 4941 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.440940 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.440950 4941 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.440959 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-inventory\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.440969 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gr7l9\" (UniqueName: \"kubernetes.io/projected/a342b77c-f58f-499e-9671-a67cd80d9e3e-kube-api-access-gr7l9\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.440977 4941 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.440986 4941 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.440996 4941 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.441005 4941 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/a342b77c-f58f-499e-9671-a67cd80d9e3e-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\""
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.800086 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4" event={"ID":"a342b77c-f58f-499e-9671-a67cd80d9e3e","Type":"ContainerDied","Data":"3006747ef5f30f8a48fe47ccc5a86ca2bdbcc54ab99f2edd5c44dfc30df497bf"}
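The teardown sequence above follows a fixed pattern: "UnmountVolume started" for each volume still mounted, "TearDown succeeded" as each unmount completes, then "Volume detached" once the reconciler's actual state matches the (now empty) desired state. A deliberately simplified sketch of that reconcile pattern; this is a toy model, not the kubelet's volume manager API, and all names are illustrative:

```go
package main

import "fmt"

// reconcile unmounts every volume that is present in the actual state but
// absent from the desired state, then records it as detached.
func reconcile(desired, actual map[string]bool, unmount func(string) error) {
	for vol := range actual {
		if !desired[vol] {
			if err := unmount(vol); err != nil {
				fmt.Printf("UnmountVolume failed for %q: %v\n", vol, err)
				continue
			}
			delete(actual, vol)
			fmt.Printf("Volume detached for volume %q\n", vol)
		}
	}
}

func main() {
	actual := map[string]bool{"ssh-key": true, "inventory": true, "ceph": true}
	// Pod deleted: desired state is empty, so everything gets torn down.
	reconcile(map[string]bool{}, actual, func(string) error { return nil })
}
```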
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.800573 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3006747ef5f30f8a48fe47ccc5a86ca2bdbcc54ab99f2edd5c44dfc30df497bf"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.800661 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-8ktj4"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.943468 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-57swc"]
Nov 30 08:53:27 crc kubenswrapper[4941]: E1130 08:53:27.944383 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb3a386c-68aa-4e2e-b2e0-4166b61129c5" containerName="extract-utilities"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.944485 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb3a386c-68aa-4e2e-b2e0-4166b61129c5" containerName="extract-utilities"
Nov 30 08:53:27 crc kubenswrapper[4941]: E1130 08:53:27.944573 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a342b77c-f58f-499e-9671-a67cd80d9e3e" containerName="nova-cell1-openstack-openstack-cell1"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.944675 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a342b77c-f58f-499e-9671-a67cd80d9e3e" containerName="nova-cell1-openstack-openstack-cell1"
Nov 30 08:53:27 crc kubenswrapper[4941]: E1130 08:53:27.944795 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb3a386c-68aa-4e2e-b2e0-4166b61129c5" containerName="extract-content"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.944888 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb3a386c-68aa-4e2e-b2e0-4166b61129c5" containerName="extract-content"
Nov 30 08:53:27 crc kubenswrapper[4941]: E1130 08:53:27.944991 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb3a386c-68aa-4e2e-b2e0-4166b61129c5" containerName="registry-server"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.945079 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb3a386c-68aa-4e2e-b2e0-4166b61129c5" containerName="registry-server"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.945505 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb3a386c-68aa-4e2e-b2e0-4166b61129c5" containerName="registry-server"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.945624 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a342b77c-f58f-499e-9671-a67cd80d9e3e" containerName="nova-cell1-openstack-openstack-cell1"
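The RemoveStaleState entries above show the CPU and memory managers dropping per-container resource assignments for pods that no longer exist (the community-operators and nova-cell1 pods just deleted). A toy model of that cleanup, keyed by pod UID and container name; this is illustrative only, not the kubelet's cpu_manager/memory_manager API, and the UIDs are abbreviated from the log:

```go
package main

import "fmt"

type key struct{ podUID, container string }

// removeStaleState drops assignments belonging to pods that are no longer
// in the active set, mirroring the log entries above.
func removeStaleState(assignments map[key][]int, activePods map[string]bool) {
	for k := range assignments {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container %q of pod %q\n", k.container, k.podUID)
			delete(assignments, k)
		}
	}
}

func main() {
	assignments := map[key][]int{
		{"fb3a386c", "registry-server"}: {2, 3}, // stale: pod was deleted above
		{"ba4069f3", "telemetry"}:       {4},    // still active
	}
	removeStaleState(assignments, map[string]bool{"ba4069f3": true})
	fmt.Println("remaining assignments:", len(assignments))
}
```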
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.946725 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-57swc"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.949461 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.949778 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.949863 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.950031 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.952116 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6"
Nov 30 08:53:27 crc kubenswrapper[4941]: I1130 08:53:27.959459 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-57swc"]
Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.057169 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc"
Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.057434 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ssh-key\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc"
Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.057470 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjgg4\" (UniqueName: \"kubernetes.io/projected/ba4069f3-4792-4a09-a693-9c1f38b2514c-kube-api-access-kjgg4\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc"
Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.057513 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceph\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc"
Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.057571 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc"
Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.057593 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName:
\"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.057623 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-inventory\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.057854 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.162652 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ssh-key\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.162723 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjgg4\" (UniqueName: \"kubernetes.io/projected/ba4069f3-4792-4a09-a693-9c1f38b2514c-kube-api-access-kjgg4\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.162775 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceph\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.162843 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.162865 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.162901 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-inventory\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: 
\"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.162926 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.162967 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.168882 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-inventory\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.168997 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.169430 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.169522 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ssh-key\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.171174 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceph\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.172587 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.176414 4941 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.185759 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjgg4\" (UniqueName: \"kubernetes.io/projected/ba4069f3-4792-4a09-a693-9c1f38b2514c-kube-api-access-kjgg4\") pod \"telemetry-openstack-openstack-cell1-57swc\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.273887 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.523695 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:53:28 crc kubenswrapper[4941]: E1130 08:53:28.524472 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:53:28 crc kubenswrapper[4941]: I1130 08:53:28.927298 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-57swc"] Nov 30 08:53:29 crc kubenswrapper[4941]: I1130 08:53:29.830003 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-57swc" event={"ID":"ba4069f3-4792-4a09-a693-9c1f38b2514c","Type":"ContainerStarted","Data":"32a35830c106a714b8a19d90662592c228579dceb21cebd5d709c4871f771363"} Nov 30 08:53:30 crc kubenswrapper[4941]: I1130 08:53:30.848382 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-57swc" event={"ID":"ba4069f3-4792-4a09-a693-9c1f38b2514c","Type":"ContainerStarted","Data":"22bcd330199ebef8066b685fef0e700c7cf5864759d92dc551fc57f5c17920d3"} Nov 30 08:53:30 crc kubenswrapper[4941]: I1130 08:53:30.878877 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-57swc" podStartSLOduration=3.207386919 podStartE2EDuration="3.87885342s" podCreationTimestamp="2025-11-30 08:53:27 +0000 UTC" firstStartedPulling="2025-11-30 08:53:28.941995592 +0000 UTC m=+7629.710167201" lastFinishedPulling="2025-11-30 08:53:29.613462053 +0000 UTC m=+7630.381633702" observedRunningTime="2025-11-30 08:53:30.871624537 +0000 UTC m=+7631.639796186" watchObservedRunningTime="2025-11-30 08:53:30.87885342 +0000 UTC m=+7631.647025019" Nov 30 08:53:42 crc kubenswrapper[4941]: I1130 08:53:42.522149 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:53:42 crc kubenswrapper[4941]: E1130 08:53:42.523170 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:53:54 crc kubenswrapper[4941]: I1130 08:53:54.521790 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:53:54 crc kubenswrapper[4941]: E1130 08:53:54.523097 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:54:09 crc kubenswrapper[4941]: I1130 08:54:09.538130 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:54:09 crc kubenswrapper[4941]: E1130 08:54:09.541795 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:54:23 crc kubenswrapper[4941]: I1130 08:54:23.523218 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:54:23 crc kubenswrapper[4941]: E1130 08:54:23.524777 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:54:34 crc kubenswrapper[4941]: I1130 08:54:34.522863 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:54:34 crc kubenswrapper[4941]: E1130 08:54:34.523727 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:54:45 crc kubenswrapper[4941]: I1130 08:54:45.525574 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:54:45 crc kubenswrapper[4941]: E1130 08:54:45.527248 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:54:57 crc kubenswrapper[4941]: I1130 08:54:57.522161 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:54:57 crc kubenswrapper[4941]: E1130 08:54:57.523674 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:55:10 crc kubenswrapper[4941]: I1130 08:55:10.522062 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:55:10 crc kubenswrapper[4941]: E1130 08:55:10.524102 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:55:24 crc kubenswrapper[4941]: I1130 08:55:24.522998 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:55:24 crc kubenswrapper[4941]: E1130 08:55:24.524277 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 08:55:37 crc kubenswrapper[4941]: I1130 08:55:37.522530 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:55:38 crc kubenswrapper[4941]: I1130 08:55:38.552350 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"f6761dbd1bbcf1b92940c5b8baec59e6668797156eef4bf7a2a454a505d37305"} Nov 30 08:57:38 crc kubenswrapper[4941]: I1130 08:57:38.382056 4941 generic.go:334] "Generic (PLEG): container finished" podID="ba4069f3-4792-4a09-a693-9c1f38b2514c" containerID="22bcd330199ebef8066b685fef0e700c7cf5864759d92dc551fc57f5c17920d3" exitCode=0 Nov 30 08:57:38 crc kubenswrapper[4941]: I1130 08:57:38.382252 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-57swc" event={"ID":"ba4069f3-4792-4a09-a693-9c1f38b2514c","Type":"ContainerDied","Data":"22bcd330199ebef8066b685fef0e700c7cf5864759d92dc551fc57f5c17920d3"} Nov 30 08:57:39 crc kubenswrapper[4941]: E1130 08:57:39.765130 4941 kubelet_node_status.go:756] "Failed to set some node status fields" err="failed to validate nodeIP: route ip+net: no such network interface" node="crc" Nov 30 08:57:39 crc kubenswrapper[4941]: I1130 08:57:39.994638 4941 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.064427 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjgg4\" (UniqueName: \"kubernetes.io/projected/ba4069f3-4792-4a09-a693-9c1f38b2514c-kube-api-access-kjgg4\") pod \"ba4069f3-4792-4a09-a693-9c1f38b2514c\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.064698 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-2\") pod \"ba4069f3-4792-4a09-a693-9c1f38b2514c\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.066286 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-telemetry-combined-ca-bundle\") pod \"ba4069f3-4792-4a09-a693-9c1f38b2514c\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.066416 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceph\") pod \"ba4069f3-4792-4a09-a693-9c1f38b2514c\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.066521 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-0\") pod \"ba4069f3-4792-4a09-a693-9c1f38b2514c\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.066657 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ssh-key\") pod \"ba4069f3-4792-4a09-a693-9c1f38b2514c\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.066801 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-1\") pod \"ba4069f3-4792-4a09-a693-9c1f38b2514c\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.067043 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-inventory\") pod \"ba4069f3-4792-4a09-a693-9c1f38b2514c\" (UID: \"ba4069f3-4792-4a09-a693-9c1f38b2514c\") " Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.092807 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "ba4069f3-4792-4a09-a693-9c1f38b2514c" (UID: "ba4069f3-4792-4a09-a693-9c1f38b2514c"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.093675 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba4069f3-4792-4a09-a693-9c1f38b2514c-kube-api-access-kjgg4" (OuterVolumeSpecName: "kube-api-access-kjgg4") pod "ba4069f3-4792-4a09-a693-9c1f38b2514c" (UID: "ba4069f3-4792-4a09-a693-9c1f38b2514c"). InnerVolumeSpecName "kube-api-access-kjgg4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.095207 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceph" (OuterVolumeSpecName: "ceph") pod "ba4069f3-4792-4a09-a693-9c1f38b2514c" (UID: "ba4069f3-4792-4a09-a693-9c1f38b2514c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.104554 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ba4069f3-4792-4a09-a693-9c1f38b2514c" (UID: "ba4069f3-4792-4a09-a693-9c1f38b2514c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.122058 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "ba4069f3-4792-4a09-a693-9c1f38b2514c" (UID: "ba4069f3-4792-4a09-a693-9c1f38b2514c"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.124888 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "ba4069f3-4792-4a09-a693-9c1f38b2514c" (UID: "ba4069f3-4792-4a09-a693-9c1f38b2514c"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.139506 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-inventory" (OuterVolumeSpecName: "inventory") pod "ba4069f3-4792-4a09-a693-9c1f38b2514c" (UID: "ba4069f3-4792-4a09-a693-9c1f38b2514c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.143494 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "ba4069f3-4792-4a09-a693-9c1f38b2514c" (UID: "ba4069f3-4792-4a09-a693-9c1f38b2514c"). InnerVolumeSpecName "ceilometer-compute-config-data-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.170854 4941 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.171210 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.171221 4941 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.171233 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.171244 4941 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.171255 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.171265 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjgg4\" (UniqueName: \"kubernetes.io/projected/ba4069f3-4792-4a09-a693-9c1f38b2514c-kube-api-access-kjgg4\") on node \"crc\" DevicePath \"\"" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.171274 4941 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/ba4069f3-4792-4a09-a693-9c1f38b2514c-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.410837 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-57swc" event={"ID":"ba4069f3-4792-4a09-a693-9c1f38b2514c","Type":"ContainerDied","Data":"32a35830c106a714b8a19d90662592c228579dceb21cebd5d709c4871f771363"} Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.410893 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32a35830c106a714b8a19d90662592c228579dceb21cebd5d709c4871f771363" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.411750 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-57swc" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.548092 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-cvf6p"] Nov 30 08:57:40 crc kubenswrapper[4941]: E1130 08:57:40.548921 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba4069f3-4792-4a09-a693-9c1f38b2514c" containerName="telemetry-openstack-openstack-cell1" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.548947 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba4069f3-4792-4a09-a693-9c1f38b2514c" containerName="telemetry-openstack-openstack-cell1" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.549205 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba4069f3-4792-4a09-a693-9c1f38b2514c" containerName="telemetry-openstack-openstack-cell1" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.550103 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.553671 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.554696 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.556901 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.556977 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.557044 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.572012 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-cvf6p"] Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.691100 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.691899 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.691938 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz8h9\" (UniqueName: \"kubernetes.io/projected/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-kube-api-access-dz8h9\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc 
kubenswrapper[4941]: I1130 08:57:40.692165 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.692290 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.692978 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.796234 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.796421 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz8h9\" (UniqueName: \"kubernetes.io/projected/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-kube-api-access-dz8h9\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.796459 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.796604 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.796706 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.796769 4941 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.802501 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.802898 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.802906 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.803692 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.804503 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.823060 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz8h9\" (UniqueName: \"kubernetes.io/projected/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-kube-api-access-dz8h9\") pod \"neutron-sriov-openstack-openstack-cell1-cvf6p\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:40 crc kubenswrapper[4941]: I1130 08:57:40.874025 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 08:57:41 crc kubenswrapper[4941]: I1130 08:57:41.583545 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-cvf6p"] Nov 30 08:57:41 crc kubenswrapper[4941]: I1130 08:57:41.596242 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 08:57:42 crc kubenswrapper[4941]: I1130 08:57:42.439635 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" event={"ID":"34bc5f99-c870-4bc0-9873-b87d4f3fa30a","Type":"ContainerStarted","Data":"466a8b10dbb844f80974f610f9e8baf11a447aa2358033a5cb7535d0ceb92cd5"} Nov 30 08:57:42 crc kubenswrapper[4941]: I1130 08:57:42.440236 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" event={"ID":"34bc5f99-c870-4bc0-9873-b87d4f3fa30a","Type":"ContainerStarted","Data":"65d3c32c4d8e58227c92052fd24087a0f54ccc2d1877351f38dc83fe4bbdc15b"} Nov 30 08:57:42 crc kubenswrapper[4941]: I1130 08:57:42.475682 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" podStartSLOduration=1.972596097 podStartE2EDuration="2.475654364s" podCreationTimestamp="2025-11-30 08:57:40 +0000 UTC" firstStartedPulling="2025-11-30 08:57:41.595966837 +0000 UTC m=+7882.364138446" lastFinishedPulling="2025-11-30 08:57:42.099025104 +0000 UTC m=+7882.867196713" observedRunningTime="2025-11-30 08:57:42.466274904 +0000 UTC m=+7883.234446523" watchObservedRunningTime="2025-11-30 08:57:42.475654364 +0000 UTC m=+7883.243825983" Nov 30 08:58:02 crc kubenswrapper[4941]: I1130 08:58:02.978927 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:58:02 crc kubenswrapper[4941]: I1130 08:58:02.979793 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:58:32 crc kubenswrapper[4941]: I1130 08:58:32.979379 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 08:58:32 crc kubenswrapper[4941]: I1130 08:58:32.980203 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:59:02 crc kubenswrapper[4941]: I1130 08:59:02.979484 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" start-of-body= Nov 30 08:59:02 crc kubenswrapper[4941]: I1130 08:59:02.980430 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 08:59:02 crc kubenswrapper[4941]: I1130 08:59:02.980516 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 08:59:02 crc kubenswrapper[4941]: I1130 08:59:02.982034 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f6761dbd1bbcf1b92940c5b8baec59e6668797156eef4bf7a2a454a505d37305"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 08:59:02 crc kubenswrapper[4941]: I1130 08:59:02.982149 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://f6761dbd1bbcf1b92940c5b8baec59e6668797156eef4bf7a2a454a505d37305" gracePeriod=600 Nov 30 08:59:03 crc kubenswrapper[4941]: I1130 08:59:03.597148 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="f6761dbd1bbcf1b92940c5b8baec59e6668797156eef4bf7a2a454a505d37305" exitCode=0 Nov 30 08:59:03 crc kubenswrapper[4941]: I1130 08:59:03.597235 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"f6761dbd1bbcf1b92940c5b8baec59e6668797156eef4bf7a2a454a505d37305"} Nov 30 08:59:03 crc kubenswrapper[4941]: I1130 08:59:03.597746 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"} Nov 30 08:59:03 crc kubenswrapper[4941]: I1130 08:59:03.597784 4941 scope.go:117] "RemoveContainer" containerID="1f3fc12c0b1ba37836c808dd57aafa8150452a888416b4ca67f5d9f03a303adf" Nov 30 08:59:39 crc kubenswrapper[4941]: I1130 08:59:39.833252 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s5zf7"] Nov 30 08:59:39 crc kubenswrapper[4941]: I1130 08:59:39.847825 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 08:59:39 crc kubenswrapper[4941]: I1130 08:59:39.895619 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s5zf7"] Nov 30 08:59:39 crc kubenswrapper[4941]: I1130 08:59:39.920572 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5j8v\" (UniqueName: \"kubernetes.io/projected/92519cda-0730-4b25-b85a-76843c2cc17f-kube-api-access-z5j8v\") pod \"redhat-operators-s5zf7\" (UID: \"92519cda-0730-4b25-b85a-76843c2cc17f\") " pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 08:59:39 crc kubenswrapper[4941]: I1130 08:59:39.920988 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92519cda-0730-4b25-b85a-76843c2cc17f-utilities\") pod \"redhat-operators-s5zf7\" (UID: \"92519cda-0730-4b25-b85a-76843c2cc17f\") " pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 08:59:39 crc kubenswrapper[4941]: I1130 08:59:39.921064 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92519cda-0730-4b25-b85a-76843c2cc17f-catalog-content\") pod \"redhat-operators-s5zf7\" (UID: \"92519cda-0730-4b25-b85a-76843c2cc17f\") " pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 08:59:40 crc kubenswrapper[4941]: I1130 08:59:40.024379 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92519cda-0730-4b25-b85a-76843c2cc17f-utilities\") pod \"redhat-operators-s5zf7\" (UID: \"92519cda-0730-4b25-b85a-76843c2cc17f\") " pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 08:59:40 crc kubenswrapper[4941]: I1130 08:59:40.024495 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92519cda-0730-4b25-b85a-76843c2cc17f-catalog-content\") pod \"redhat-operators-s5zf7\" (UID: \"92519cda-0730-4b25-b85a-76843c2cc17f\") " pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 08:59:40 crc kubenswrapper[4941]: I1130 08:59:40.025148 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92519cda-0730-4b25-b85a-76843c2cc17f-utilities\") pod \"redhat-operators-s5zf7\" (UID: \"92519cda-0730-4b25-b85a-76843c2cc17f\") " pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 08:59:40 crc kubenswrapper[4941]: I1130 08:59:40.025217 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92519cda-0730-4b25-b85a-76843c2cc17f-catalog-content\") pod \"redhat-operators-s5zf7\" (UID: \"92519cda-0730-4b25-b85a-76843c2cc17f\") " pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 08:59:40 crc kubenswrapper[4941]: I1130 08:59:40.025400 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5j8v\" (UniqueName: \"kubernetes.io/projected/92519cda-0730-4b25-b85a-76843c2cc17f-kube-api-access-z5j8v\") pod \"redhat-operators-s5zf7\" (UID: \"92519cda-0730-4b25-b85a-76843c2cc17f\") " pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 08:59:40 crc kubenswrapper[4941]: I1130 08:59:40.056976 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-z5j8v\" (UniqueName: \"kubernetes.io/projected/92519cda-0730-4b25-b85a-76843c2cc17f-kube-api-access-z5j8v\") pod \"redhat-operators-s5zf7\" (UID: \"92519cda-0730-4b25-b85a-76843c2cc17f\") " pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 08:59:40 crc kubenswrapper[4941]: I1130 08:59:40.177736 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 08:59:40 crc kubenswrapper[4941]: I1130 08:59:40.722316 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s5zf7"] Nov 30 08:59:41 crc kubenswrapper[4941]: I1130 08:59:41.195039 4941 generic.go:334] "Generic (PLEG): container finished" podID="92519cda-0730-4b25-b85a-76843c2cc17f" containerID="b2acd208a889a510f6476a0dd109bfb92ac4f537c6874a156ebba2028b104c20" exitCode=0 Nov 30 08:59:41 crc kubenswrapper[4941]: I1130 08:59:41.195122 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5zf7" event={"ID":"92519cda-0730-4b25-b85a-76843c2cc17f","Type":"ContainerDied","Data":"b2acd208a889a510f6476a0dd109bfb92ac4f537c6874a156ebba2028b104c20"} Nov 30 08:59:41 crc kubenswrapper[4941]: I1130 08:59:41.195512 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5zf7" event={"ID":"92519cda-0730-4b25-b85a-76843c2cc17f","Type":"ContainerStarted","Data":"68bcb2f50611d0b10b608a70e8f24ff07f8042a33d3627302e1d158cc433b499"} Nov 30 08:59:43 crc kubenswrapper[4941]: I1130 08:59:43.223853 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5zf7" event={"ID":"92519cda-0730-4b25-b85a-76843c2cc17f","Type":"ContainerStarted","Data":"2d3a3650f95edca311b7ea1c0e5304a402338697dabf50d9219b83cd5c40ec20"} Nov 30 08:59:46 crc kubenswrapper[4941]: I1130 08:59:46.277864 4941 generic.go:334] "Generic (PLEG): container finished" podID="92519cda-0730-4b25-b85a-76843c2cc17f" containerID="2d3a3650f95edca311b7ea1c0e5304a402338697dabf50d9219b83cd5c40ec20" exitCode=0 Nov 30 08:59:46 crc kubenswrapper[4941]: I1130 08:59:46.277995 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5zf7" event={"ID":"92519cda-0730-4b25-b85a-76843c2cc17f","Type":"ContainerDied","Data":"2d3a3650f95edca311b7ea1c0e5304a402338697dabf50d9219b83cd5c40ec20"} Nov 30 08:59:47 crc kubenswrapper[4941]: I1130 08:59:47.292983 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5zf7" event={"ID":"92519cda-0730-4b25-b85a-76843c2cc17f","Type":"ContainerStarted","Data":"f5eb6dbb6299a08f3594fc4c2f0cbe88c3e852d49664a3d400b551c95eaecd9e"} Nov 30 08:59:47 crc kubenswrapper[4941]: I1130 08:59:47.332513 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-s5zf7" podStartSLOduration=2.6986802 podStartE2EDuration="8.332309639s" podCreationTimestamp="2025-11-30 08:59:39 +0000 UTC" firstStartedPulling="2025-11-30 08:59:41.197370547 +0000 UTC m=+8001.965542156" lastFinishedPulling="2025-11-30 08:59:46.830999986 +0000 UTC m=+8007.599171595" observedRunningTime="2025-11-30 08:59:47.320724771 +0000 UTC m=+8008.088896380" watchObservedRunningTime="2025-11-30 08:59:47.332309639 +0000 UTC m=+8008.100481258" Nov 30 08:59:50 crc kubenswrapper[4941]: I1130 08:59:50.177893 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s5zf7" Nov 
30 08:59:50 crc kubenswrapper[4941]: I1130 08:59:50.178178 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 08:59:51 crc kubenswrapper[4941]: I1130 08:59:51.243926 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-s5zf7" podUID="92519cda-0730-4b25-b85a-76843c2cc17f" containerName="registry-server" probeResult="failure" output=< Nov 30 08:59:51 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s Nov 30 08:59:51 crc kubenswrapper[4941]: > Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.076655 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5nk77"] Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.081341 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5nk77" Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.094783 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5nk77"] Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.219807 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-utilities\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77" Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.220101 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-catalog-content\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77" Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.220992 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzl89\" (UniqueName: \"kubernetes.io/projected/aac813b8-825e-435d-b3e3-a2a9319b559b-kube-api-access-gzl89\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77" Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.325466 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzl89\" (UniqueName: \"kubernetes.io/projected/aac813b8-825e-435d-b3e3-a2a9319b559b-kube-api-access-gzl89\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77" Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.325908 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-utilities\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77" Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.326563 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-catalog-content\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77" 
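The probe output above is the registry-server startup probe timing out against the catalog's gRPC endpoint on port 50051; the catalog index is still loading at that point, and the later "startup ... started" / "readiness ... ready" transitions show it eventually coming up. At the socket level the failing half of that check is just a timed connect; a minimal sketch mirroring the logged message (the real probe likely also issues a gRPC health RPC, which is omitted here):

    import socket

    def can_connect(host="127.0.0.1", port=50051, timeout=1.0):
        """Timed TCP connect, printing a message shaped like the probe output above."""
        try:
            with socket.create_connection((host, port), timeout=timeout):
                return True
        except OSError:
            print(f'timeout: failed to connect service ":{port}" within {timeout:g}s')
            return False

    can_connect()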
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.076655 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5nk77"]
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.081341 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5nk77"
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.094783 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5nk77"]
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.219807 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-utilities\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77"
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.220101 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-catalog-content\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77"
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.220992 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzl89\" (UniqueName: \"kubernetes.io/projected/aac813b8-825e-435d-b3e3-a2a9319b559b-kube-api-access-gzl89\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77"
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.325466 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzl89\" (UniqueName: \"kubernetes.io/projected/aac813b8-825e-435d-b3e3-a2a9319b559b-kube-api-access-gzl89\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77"
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.325908 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-utilities\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77"
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.326563 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-catalog-content\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77"
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.326438 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-utilities\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77"
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.326875 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-catalog-content\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77"
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.353608 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzl89\" (UniqueName: \"kubernetes.io/projected/aac813b8-825e-435d-b3e3-a2a9319b559b-kube-api-access-gzl89\") pod \"redhat-marketplace-5nk77\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " pod="openshift-marketplace/redhat-marketplace-5nk77"
Nov 30 08:59:56 crc kubenswrapper[4941]: I1130 08:59:56.417474 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5nk77"
Nov 30 08:59:57 crc kubenswrapper[4941]: I1130 08:59:57.505965 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5nk77"]
Nov 30 08:59:58 crc kubenswrapper[4941]: I1130 08:59:58.450555 4941 generic.go:334] "Generic (PLEG): container finished" podID="aac813b8-825e-435d-b3e3-a2a9319b559b" containerID="394d02ed5f2b3a4fd6994725dd14967ef659898f1c04e2f177a8e77ed7b8a16a" exitCode=0
Nov 30 08:59:58 crc kubenswrapper[4941]: I1130 08:59:58.450665 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nk77" event={"ID":"aac813b8-825e-435d-b3e3-a2a9319b559b","Type":"ContainerDied","Data":"394d02ed5f2b3a4fd6994725dd14967ef659898f1c04e2f177a8e77ed7b8a16a"}
Nov 30 08:59:58 crc kubenswrapper[4941]: I1130 08:59:58.451024 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nk77" event={"ID":"aac813b8-825e-435d-b3e3-a2a9319b559b","Type":"ContainerStarted","Data":"24fe8524f4555a8af1d9be62cdb60b51de57002dd7ea763bfba3a043497f8d62"}
Nov 30 09:00:00 crc kubenswrapper[4941]: E1130 09:00:00.082396 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaac813b8_825e_435d_b3e3_a2a9319b559b.slice/crio-1838a1b79e3626c14d74fb3b62fbb2c3f7af3bc89f70c26b0fca306eaeb5ddad.scope\": RecentStats: unable to find data in memory cache]"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.182680 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"]
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.184814 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.189208 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.190057 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.206518 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"]
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.239240 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s5zf7"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.306904 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s5zf7"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.328319 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v5ts\" (UniqueName: \"kubernetes.io/projected/a53a92e6-6fae-45a9-841a-492ef07c2aa1-kube-api-access-8v5ts\") pod \"collect-profiles-29408220-88vvz\" (UID: \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.329772 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a53a92e6-6fae-45a9-841a-492ef07c2aa1-secret-volume\") pod \"collect-profiles-29408220-88vvz\" (UID: \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.329929 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a53a92e6-6fae-45a9-841a-492ef07c2aa1-config-volume\") pod \"collect-profiles-29408220-88vvz\" (UID: \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.431667 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a53a92e6-6fae-45a9-841a-492ef07c2aa1-secret-volume\") pod \"collect-profiles-29408220-88vvz\" (UID: \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.431739 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a53a92e6-6fae-45a9-841a-492ef07c2aa1-config-volume\") pod \"collect-profiles-29408220-88vvz\" (UID: \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.431793 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v5ts\" (UniqueName: \"kubernetes.io/projected/a53a92e6-6fae-45a9-841a-492ef07c2aa1-kube-api-access-8v5ts\") pod \"collect-profiles-29408220-88vvz\" (UID: \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.435023 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a53a92e6-6fae-45a9-841a-492ef07c2aa1-config-volume\") pod \"collect-profiles-29408220-88vvz\" (UID: \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.438864 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a53a92e6-6fae-45a9-841a-492ef07c2aa1-secret-volume\") pod \"collect-profiles-29408220-88vvz\" (UID: \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.452425 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8v5ts\" (UniqueName: \"kubernetes.io/projected/a53a92e6-6fae-45a9-841a-492ef07c2aa1-kube-api-access-8v5ts\") pod \"collect-profiles-29408220-88vvz\" (UID: \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.482196 4941 generic.go:334] "Generic (PLEG): container finished" podID="aac813b8-825e-435d-b3e3-a2a9319b559b" containerID="1838a1b79e3626c14d74fb3b62fbb2c3f7af3bc89f70c26b0fca306eaeb5ddad" exitCode=0
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.482443 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nk77" event={"ID":"aac813b8-825e-435d-b3e3-a2a9319b559b","Type":"ContainerDied","Data":"1838a1b79e3626c14d74fb3b62fbb2c3f7af3bc89f70c26b0fca306eaeb5ddad"}
Nov 30 09:00:00 crc kubenswrapper[4941]: I1130 09:00:00.524508 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"
Nov 30 09:00:01 crc kubenswrapper[4941]: I1130 09:00:01.030286 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz"]
Nov 30 09:00:01 crc kubenswrapper[4941]: W1130 09:00:01.031891 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda53a92e6_6fae_45a9_841a_492ef07c2aa1.slice/crio-c882109c293872c3201812ffc3893245327345589a9ce0c9a6695f937c773e2b WatchSource:0}: Error finding container c882109c293872c3201812ffc3893245327345589a9ce0c9a6695f937c773e2b: Status 404 returned error can't find the container with id c882109c293872c3201812ffc3893245327345589a9ce0c9a6695f937c773e2b
Nov 30 09:00:01 crc kubenswrapper[4941]: I1130 09:00:01.497228 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nk77" event={"ID":"aac813b8-825e-435d-b3e3-a2a9319b559b","Type":"ContainerStarted","Data":"24096c54537cb029608077d76fe01ad82dca35e01895f60437ba0ac9d18adf30"}
Nov 30 09:00:01 crc kubenswrapper[4941]: I1130 09:00:01.499570 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz" event={"ID":"a53a92e6-6fae-45a9-841a-492ef07c2aa1","Type":"ContainerStarted","Data":"e5448b48d8c6146e9a225fbbd925f253e63ebc00ac20675ac775b7b81fc711f5"}
Nov 30 09:00:01 crc kubenswrapper[4941]: I1130 09:00:01.499654 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz" event={"ID":"a53a92e6-6fae-45a9-841a-492ef07c2aa1","Type":"ContainerStarted","Data":"c882109c293872c3201812ffc3893245327345589a9ce0c9a6695f937c773e2b"}
Nov 30 09:00:01 crc kubenswrapper[4941]: I1130 09:00:01.521924 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5nk77" podStartSLOduration=2.8190732990000003 podStartE2EDuration="5.521901011s" podCreationTimestamp="2025-11-30 08:59:56 +0000 UTC" firstStartedPulling="2025-11-30 08:59:58.454735769 +0000 UTC m=+8019.222907378" lastFinishedPulling="2025-11-30 09:00:01.157563471 +0000 UTC m=+8021.925735090" observedRunningTime="2025-11-30 09:00:01.518089363 +0000 UTC m=+8022.286260972" watchObservedRunningTime="2025-11-30 09:00:01.521901011 +0000 UTC m=+8022.290072620"
Nov 30 09:00:01 crc kubenswrapper[4941]: I1130 09:00:01.568764 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz" podStartSLOduration=1.568731688 podStartE2EDuration="1.568731688s" podCreationTimestamp="2025-11-30 09:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 09:00:01.554072075 +0000 UTC m=+8022.322243684" watchObservedRunningTime="2025-11-30 09:00:01.568731688 +0000 UTC m=+8022.336903297"
containerID="cri-o://f5eb6dbb6299a08f3594fc4c2f0cbe88c3e852d49664a3d400b551c95eaecd9e" gracePeriod=2 Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.518046 4941 generic.go:334] "Generic (PLEG): container finished" podID="92519cda-0730-4b25-b85a-76843c2cc17f" containerID="f5eb6dbb6299a08f3594fc4c2f0cbe88c3e852d49664a3d400b551c95eaecd9e" exitCode=0 Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.518156 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5zf7" event={"ID":"92519cda-0730-4b25-b85a-76843c2cc17f","Type":"ContainerDied","Data":"f5eb6dbb6299a08f3594fc4c2f0cbe88c3e852d49664a3d400b551c95eaecd9e"} Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.523834 4941 generic.go:334] "Generic (PLEG): container finished" podID="a53a92e6-6fae-45a9-841a-492ef07c2aa1" containerID="e5448b48d8c6146e9a225fbbd925f253e63ebc00ac20675ac775b7b81fc711f5" exitCode=0 Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.525987 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz" event={"ID":"a53a92e6-6fae-45a9-841a-492ef07c2aa1","Type":"ContainerDied","Data":"e5448b48d8c6146e9a225fbbd925f253e63ebc00ac20675ac775b7b81fc711f5"} Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.794490 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.818677 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5j8v\" (UniqueName: \"kubernetes.io/projected/92519cda-0730-4b25-b85a-76843c2cc17f-kube-api-access-z5j8v\") pod \"92519cda-0730-4b25-b85a-76843c2cc17f\" (UID: \"92519cda-0730-4b25-b85a-76843c2cc17f\") " Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.819094 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92519cda-0730-4b25-b85a-76843c2cc17f-catalog-content\") pod \"92519cda-0730-4b25-b85a-76843c2cc17f\" (UID: \"92519cda-0730-4b25-b85a-76843c2cc17f\") " Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.819363 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92519cda-0730-4b25-b85a-76843c2cc17f-utilities\") pod \"92519cda-0730-4b25-b85a-76843c2cc17f\" (UID: \"92519cda-0730-4b25-b85a-76843c2cc17f\") " Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.821808 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92519cda-0730-4b25-b85a-76843c2cc17f-utilities" (OuterVolumeSpecName: "utilities") pod "92519cda-0730-4b25-b85a-76843c2cc17f" (UID: "92519cda-0730-4b25-b85a-76843c2cc17f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.847412 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92519cda-0730-4b25-b85a-76843c2cc17f-kube-api-access-z5j8v" (OuterVolumeSpecName: "kube-api-access-z5j8v") pod "92519cda-0730-4b25-b85a-76843c2cc17f" (UID: "92519cda-0730-4b25-b85a-76843c2cc17f"). InnerVolumeSpecName "kube-api-access-z5j8v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.922763 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92519cda-0730-4b25-b85a-76843c2cc17f-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.922806 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5j8v\" (UniqueName: \"kubernetes.io/projected/92519cda-0730-4b25-b85a-76843c2cc17f-kube-api-access-z5j8v\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:02 crc kubenswrapper[4941]: I1130 09:00:02.963266 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92519cda-0730-4b25-b85a-76843c2cc17f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "92519cda-0730-4b25-b85a-76843c2cc17f" (UID: "92519cda-0730-4b25-b85a-76843c2cc17f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:00:03 crc kubenswrapper[4941]: I1130 09:00:03.024697 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92519cda-0730-4b25-b85a-76843c2cc17f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:03 crc kubenswrapper[4941]: I1130 09:00:03.542750 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s5zf7" Nov 30 09:00:03 crc kubenswrapper[4941]: I1130 09:00:03.550652 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5zf7" event={"ID":"92519cda-0730-4b25-b85a-76843c2cc17f","Type":"ContainerDied","Data":"68bcb2f50611d0b10b608a70e8f24ff07f8042a33d3627302e1d158cc433b499"} Nov 30 09:00:03 crc kubenswrapper[4941]: I1130 09:00:03.551211 4941 scope.go:117] "RemoveContainer" containerID="f5eb6dbb6299a08f3594fc4c2f0cbe88c3e852d49664a3d400b551c95eaecd9e" Nov 30 09:00:03 crc kubenswrapper[4941]: I1130 09:00:03.588088 4941 scope.go:117] "RemoveContainer" containerID="2d3a3650f95edca311b7ea1c0e5304a402338697dabf50d9219b83cd5c40ec20" Nov 30 09:00:03 crc kubenswrapper[4941]: I1130 09:00:03.638932 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s5zf7"] Nov 30 09:00:03 crc kubenswrapper[4941]: I1130 09:00:03.667629 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-s5zf7"] Nov 30 09:00:03 crc kubenswrapper[4941]: I1130 09:00:03.676392 4941 scope.go:117] "RemoveContainer" containerID="b2acd208a889a510f6476a0dd109bfb92ac4f537c6874a156ebba2028b104c20" Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.009577 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz" Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.157575 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a53a92e6-6fae-45a9-841a-492ef07c2aa1-secret-volume\") pod \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\" (UID: \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\") " Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.157929 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8v5ts\" (UniqueName: \"kubernetes.io/projected/a53a92e6-6fae-45a9-841a-492ef07c2aa1-kube-api-access-8v5ts\") pod \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\" (UID: \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\") " Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.158570 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a53a92e6-6fae-45a9-841a-492ef07c2aa1-config-volume\") pod \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\" (UID: \"a53a92e6-6fae-45a9-841a-492ef07c2aa1\") " Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.159428 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a53a92e6-6fae-45a9-841a-492ef07c2aa1-config-volume" (OuterVolumeSpecName: "config-volume") pod "a53a92e6-6fae-45a9-841a-492ef07c2aa1" (UID: "a53a92e6-6fae-45a9-841a-492ef07c2aa1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.160193 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a53a92e6-6fae-45a9-841a-492ef07c2aa1-config-volume\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.164604 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a53a92e6-6fae-45a9-841a-492ef07c2aa1-kube-api-access-8v5ts" (OuterVolumeSpecName: "kube-api-access-8v5ts") pod "a53a92e6-6fae-45a9-841a-492ef07c2aa1" (UID: "a53a92e6-6fae-45a9-841a-492ef07c2aa1"). InnerVolumeSpecName "kube-api-access-8v5ts". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.166214 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a53a92e6-6fae-45a9-841a-492ef07c2aa1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a53a92e6-6fae-45a9-841a-492ef07c2aa1" (UID: "a53a92e6-6fae-45a9-841a-492ef07c2aa1"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.262518 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8v5ts\" (UniqueName: \"kubernetes.io/projected/a53a92e6-6fae-45a9-841a-492ef07c2aa1-kube-api-access-8v5ts\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.262561 4941 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a53a92e6-6fae-45a9-841a-492ef07c2aa1-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.570161 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz" event={"ID":"a53a92e6-6fae-45a9-841a-492ef07c2aa1","Type":"ContainerDied","Data":"c882109c293872c3201812ffc3893245327345589a9ce0c9a6695f937c773e2b"} Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.570228 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c882109c293872c3201812ffc3893245327345589a9ce0c9a6695f937c773e2b" Nov 30 09:00:04 crc kubenswrapper[4941]: I1130 09:00:04.570184 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408220-88vvz" Nov 30 09:00:05 crc kubenswrapper[4941]: I1130 09:00:05.117871 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w"] Nov 30 09:00:05 crc kubenswrapper[4941]: I1130 09:00:05.132510 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408175-wxv4w"] Nov 30 09:00:05 crc kubenswrapper[4941]: I1130 09:00:05.539288 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8033082e-7879-45b1-bf6d-0a8ebccef646" path="/var/lib/kubelet/pods/8033082e-7879-45b1-bf6d-0a8ebccef646/volumes" Nov 30 09:00:05 crc kubenswrapper[4941]: I1130 09:00:05.540369 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92519cda-0730-4b25-b85a-76843c2cc17f" path="/var/lib/kubelet/pods/92519cda-0730-4b25-b85a-76843c2cc17f/volumes" Nov 30 09:00:06 crc kubenswrapper[4941]: I1130 09:00:06.418317 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5nk77" Nov 30 09:00:06 crc kubenswrapper[4941]: I1130 09:00:06.418886 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5nk77" Nov 30 09:00:06 crc kubenswrapper[4941]: I1130 09:00:06.493222 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5nk77" Nov 30 09:00:06 crc kubenswrapper[4941]: I1130 09:00:06.660221 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5nk77" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.049848 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tg6kf"] Nov 30 09:00:09 crc kubenswrapper[4941]: E1130 09:00:09.051190 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92519cda-0730-4b25-b85a-76843c2cc17f" containerName="extract-content" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.051206 4941 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="92519cda-0730-4b25-b85a-76843c2cc17f" containerName="extract-content" Nov 30 09:00:09 crc kubenswrapper[4941]: E1130 09:00:09.051219 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92519cda-0730-4b25-b85a-76843c2cc17f" containerName="extract-utilities" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.051226 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="92519cda-0730-4b25-b85a-76843c2cc17f" containerName="extract-utilities" Nov 30 09:00:09 crc kubenswrapper[4941]: E1130 09:00:09.051261 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a53a92e6-6fae-45a9-841a-492ef07c2aa1" containerName="collect-profiles" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.051269 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a53a92e6-6fae-45a9-841a-492ef07c2aa1" containerName="collect-profiles" Nov 30 09:00:09 crc kubenswrapper[4941]: E1130 09:00:09.051292 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92519cda-0730-4b25-b85a-76843c2cc17f" containerName="registry-server" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.051299 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="92519cda-0730-4b25-b85a-76843c2cc17f" containerName="registry-server" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.051527 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="92519cda-0730-4b25-b85a-76843c2cc17f" containerName="registry-server" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.051557 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a53a92e6-6fae-45a9-841a-492ef07c2aa1" containerName="collect-profiles" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.053274 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.069796 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tg6kf"] Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.202808 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207a16c0-01d3-49a0-8892-daf7e01defc8-catalog-content\") pod \"certified-operators-tg6kf\" (UID: \"207a16c0-01d3-49a0-8892-daf7e01defc8\") " pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.202929 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207a16c0-01d3-49a0-8892-daf7e01defc8-utilities\") pod \"certified-operators-tg6kf\" (UID: \"207a16c0-01d3-49a0-8892-daf7e01defc8\") " pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.203011 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhtm4\" (UniqueName: \"kubernetes.io/projected/207a16c0-01d3-49a0-8892-daf7e01defc8-kube-api-access-hhtm4\") pod \"certified-operators-tg6kf\" (UID: \"207a16c0-01d3-49a0-8892-daf7e01defc8\") " pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.307938 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207a16c0-01d3-49a0-8892-daf7e01defc8-utilities\") pod \"certified-operators-tg6kf\" (UID: \"207a16c0-01d3-49a0-8892-daf7e01defc8\") " pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.308032 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhtm4\" (UniqueName: \"kubernetes.io/projected/207a16c0-01d3-49a0-8892-daf7e01defc8-kube-api-access-hhtm4\") pod \"certified-operators-tg6kf\" (UID: \"207a16c0-01d3-49a0-8892-daf7e01defc8\") " pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.308116 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207a16c0-01d3-49a0-8892-daf7e01defc8-catalog-content\") pod \"certified-operators-tg6kf\" (UID: \"207a16c0-01d3-49a0-8892-daf7e01defc8\") " pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.308765 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/207a16c0-01d3-49a0-8892-daf7e01defc8-catalog-content\") pod \"certified-operators-tg6kf\" (UID: \"207a16c0-01d3-49a0-8892-daf7e01defc8\") " pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.308991 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/207a16c0-01d3-49a0-8892-daf7e01defc8-utilities\") pod \"certified-operators-tg6kf\" (UID: \"207a16c0-01d3-49a0-8892-daf7e01defc8\") " pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.338445 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hhtm4\" (UniqueName: \"kubernetes.io/projected/207a16c0-01d3-49a0-8892-daf7e01defc8-kube-api-access-hhtm4\") pod \"certified-operators-tg6kf\" (UID: \"207a16c0-01d3-49a0-8892-daf7e01defc8\") " pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.373038 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:09 crc kubenswrapper[4941]: W1130 09:00:09.931287 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod207a16c0_01d3_49a0_8892_daf7e01defc8.slice/crio-57eca87ad88107e62f00b242d3eba213b3497635e8f406254e1b47d0d2825080 WatchSource:0}: Error finding container 57eca87ad88107e62f00b242d3eba213b3497635e8f406254e1b47d0d2825080: Status 404 returned error can't find the container with id 57eca87ad88107e62f00b242d3eba213b3497635e8f406254e1b47d0d2825080 Nov 30 09:00:09 crc kubenswrapper[4941]: I1130 09:00:09.931903 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tg6kf"] Nov 30 09:00:10 crc kubenswrapper[4941]: E1130 09:00:10.397186 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod207a16c0_01d3_49a0_8892_daf7e01defc8.slice/crio-conmon-dd4c7a46c78b44bbad61d6648e57745ea0dc6393373c79426efa14d9a73260c3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod207a16c0_01d3_49a0_8892_daf7e01defc8.slice/crio-dd4c7a46c78b44bbad61d6648e57745ea0dc6393373c79426efa14d9a73260c3.scope\": RecentStats: unable to find data in memory cache]" Nov 30 09:00:10 crc kubenswrapper[4941]: I1130 09:00:10.682827 4941 generic.go:334] "Generic (PLEG): container finished" podID="207a16c0-01d3-49a0-8892-daf7e01defc8" containerID="dd4c7a46c78b44bbad61d6648e57745ea0dc6393373c79426efa14d9a73260c3" exitCode=0 Nov 30 09:00:10 crc kubenswrapper[4941]: I1130 09:00:10.682891 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tg6kf" event={"ID":"207a16c0-01d3-49a0-8892-daf7e01defc8","Type":"ContainerDied","Data":"dd4c7a46c78b44bbad61d6648e57745ea0dc6393373c79426efa14d9a73260c3"} Nov 30 09:00:10 crc kubenswrapper[4941]: I1130 09:00:10.682929 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tg6kf" event={"ID":"207a16c0-01d3-49a0-8892-daf7e01defc8","Type":"ContainerStarted","Data":"57eca87ad88107e62f00b242d3eba213b3497635e8f406254e1b47d0d2825080"} Nov 30 09:00:13 crc kubenswrapper[4941]: I1130 09:00:13.844274 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5nk77"] Nov 30 09:00:13 crc kubenswrapper[4941]: I1130 09:00:13.845887 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5nk77" podUID="aac813b8-825e-435d-b3e3-a2a9319b559b" containerName="registry-server" containerID="cri-o://24096c54537cb029608077d76fe01ad82dca35e01895f60437ba0ac9d18adf30" gracePeriod=2 Nov 30 09:00:14 crc kubenswrapper[4941]: I1130 09:00:14.742763 4941 generic.go:334] "Generic (PLEG): container finished" podID="aac813b8-825e-435d-b3e3-a2a9319b559b" 
containerID="24096c54537cb029608077d76fe01ad82dca35e01895f60437ba0ac9d18adf30" exitCode=0 Nov 30 09:00:14 crc kubenswrapper[4941]: I1130 09:00:14.742891 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nk77" event={"ID":"aac813b8-825e-435d-b3e3-a2a9319b559b","Type":"ContainerDied","Data":"24096c54537cb029608077d76fe01ad82dca35e01895f60437ba0ac9d18adf30"} Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.403135 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5nk77" Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.589159 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-utilities\") pod \"aac813b8-825e-435d-b3e3-a2a9319b559b\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.589280 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzl89\" (UniqueName: \"kubernetes.io/projected/aac813b8-825e-435d-b3e3-a2a9319b559b-kube-api-access-gzl89\") pod \"aac813b8-825e-435d-b3e3-a2a9319b559b\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.589397 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-catalog-content\") pod \"aac813b8-825e-435d-b3e3-a2a9319b559b\" (UID: \"aac813b8-825e-435d-b3e3-a2a9319b559b\") " Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.591550 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-utilities" (OuterVolumeSpecName: "utilities") pod "aac813b8-825e-435d-b3e3-a2a9319b559b" (UID: "aac813b8-825e-435d-b3e3-a2a9319b559b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.612510 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aac813b8-825e-435d-b3e3-a2a9319b559b-kube-api-access-gzl89" (OuterVolumeSpecName: "kube-api-access-gzl89") pod "aac813b8-825e-435d-b3e3-a2a9319b559b" (UID: "aac813b8-825e-435d-b3e3-a2a9319b559b"). InnerVolumeSpecName "kube-api-access-gzl89". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.614903 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aac813b8-825e-435d-b3e3-a2a9319b559b" (UID: "aac813b8-825e-435d-b3e3-a2a9319b559b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.692040 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.692196 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzl89\" (UniqueName: \"kubernetes.io/projected/aac813b8-825e-435d-b3e3-a2a9319b559b-kube-api-access-gzl89\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.692213 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aac813b8-825e-435d-b3e3-a2a9319b559b-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.757812 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5nk77" event={"ID":"aac813b8-825e-435d-b3e3-a2a9319b559b","Type":"ContainerDied","Data":"24fe8524f4555a8af1d9be62cdb60b51de57002dd7ea763bfba3a043497f8d62"} Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.757889 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5nk77" Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.757914 4941 scope.go:117] "RemoveContainer" containerID="24096c54537cb029608077d76fe01ad82dca35e01895f60437ba0ac9d18adf30" Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.761039 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tg6kf" event={"ID":"207a16c0-01d3-49a0-8892-daf7e01defc8","Type":"ContainerStarted","Data":"ef926a67ca05bc547a9b141583e3afc418d235250bd1241f6af8bc9ce7203d4f"} Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.792543 4941 scope.go:117] "RemoveContainer" containerID="1838a1b79e3626c14d74fb3b62fbb2c3f7af3bc89f70c26b0fca306eaeb5ddad" Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.845685 4941 scope.go:117] "RemoveContainer" containerID="394d02ed5f2b3a4fd6994725dd14967ef659898f1c04e2f177a8e77ed7b8a16a" Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.908136 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5nk77"] Nov 30 09:00:15 crc kubenswrapper[4941]: I1130 09:00:15.919170 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5nk77"] Nov 30 09:00:16 crc kubenswrapper[4941]: I1130 09:00:16.774745 4941 generic.go:334] "Generic (PLEG): container finished" podID="207a16c0-01d3-49a0-8892-daf7e01defc8" containerID="ef926a67ca05bc547a9b141583e3afc418d235250bd1241f6af8bc9ce7203d4f" exitCode=0 Nov 30 09:00:16 crc kubenswrapper[4941]: I1130 09:00:16.774804 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tg6kf" event={"ID":"207a16c0-01d3-49a0-8892-daf7e01defc8","Type":"ContainerDied","Data":"ef926a67ca05bc547a9b141583e3afc418d235250bd1241f6af8bc9ce7203d4f"} Nov 30 09:00:17 crc kubenswrapper[4941]: I1130 09:00:17.541766 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aac813b8-825e-435d-b3e3-a2a9319b559b" path="/var/lib/kubelet/pods/aac813b8-825e-435d-b3e3-a2a9319b559b/volumes" Nov 30 09:00:17 crc kubenswrapper[4941]: I1130 09:00:17.790059 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-tg6kf" event={"ID":"207a16c0-01d3-49a0-8892-daf7e01defc8","Type":"ContainerStarted","Data":"88482157b6d88208fd27254c152e15534beb5b529138c3cbb1cd4a4d81bb87e4"} Nov 30 09:00:17 crc kubenswrapper[4941]: I1130 09:00:17.838806 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tg6kf" podStartSLOduration=2.120906609 podStartE2EDuration="8.838765305s" podCreationTimestamp="2025-11-30 09:00:09 +0000 UTC" firstStartedPulling="2025-11-30 09:00:10.687158834 +0000 UTC m=+8031.455330463" lastFinishedPulling="2025-11-30 09:00:17.40501752 +0000 UTC m=+8038.173189159" observedRunningTime="2025-11-30 09:00:17.812654979 +0000 UTC m=+8038.580826608" watchObservedRunningTime="2025-11-30 09:00:17.838765305 +0000 UTC m=+8038.606936924" Nov 30 09:00:19 crc kubenswrapper[4941]: I1130 09:00:19.373710 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:19 crc kubenswrapper[4941]: I1130 09:00:19.374470 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:19 crc kubenswrapper[4941]: I1130 09:00:19.453694 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:29 crc kubenswrapper[4941]: I1130 09:00:29.439291 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tg6kf" Nov 30 09:00:32 crc kubenswrapper[4941]: I1130 09:00:32.489611 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tg6kf"] Nov 30 09:00:32 crc kubenswrapper[4941]: I1130 09:00:32.971664 4941 generic.go:334] "Generic (PLEG): container finished" podID="34bc5f99-c870-4bc0-9873-b87d4f3fa30a" containerID="466a8b10dbb844f80974f610f9e8baf11a447aa2358033a5cb7535d0ceb92cd5" exitCode=0 Nov 30 09:00:32 crc kubenswrapper[4941]: I1130 09:00:32.971781 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" event={"ID":"34bc5f99-c870-4bc0-9873-b87d4f3fa30a","Type":"ContainerDied","Data":"466a8b10dbb844f80974f610f9e8baf11a447aa2358033a5cb7535d0ceb92cd5"} Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.046774 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s49t5"] Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.047124 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s49t5" podUID="53470168-a7a9-4617-83d5-3acbbd4467e5" containerName="registry-server" containerID="cri-o://fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29" gracePeriod=2 Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.615457 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s49t5" Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.708587 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53470168-a7a9-4617-83d5-3acbbd4467e5-utilities\") pod \"53470168-a7a9-4617-83d5-3acbbd4467e5\" (UID: \"53470168-a7a9-4617-83d5-3acbbd4467e5\") " Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.708713 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53470168-a7a9-4617-83d5-3acbbd4467e5-catalog-content\") pod \"53470168-a7a9-4617-83d5-3acbbd4467e5\" (UID: \"53470168-a7a9-4617-83d5-3acbbd4467e5\") " Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.708892 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gnll\" (UniqueName: \"kubernetes.io/projected/53470168-a7a9-4617-83d5-3acbbd4467e5-kube-api-access-5gnll\") pod \"53470168-a7a9-4617-83d5-3acbbd4467e5\" (UID: \"53470168-a7a9-4617-83d5-3acbbd4467e5\") " Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.709012 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53470168-a7a9-4617-83d5-3acbbd4467e5-utilities" (OuterVolumeSpecName: "utilities") pod "53470168-a7a9-4617-83d5-3acbbd4467e5" (UID: "53470168-a7a9-4617-83d5-3acbbd4467e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.709672 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53470168-a7a9-4617-83d5-3acbbd4467e5-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.725591 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53470168-a7a9-4617-83d5-3acbbd4467e5-kube-api-access-5gnll" (OuterVolumeSpecName: "kube-api-access-5gnll") pod "53470168-a7a9-4617-83d5-3acbbd4467e5" (UID: "53470168-a7a9-4617-83d5-3acbbd4467e5"). InnerVolumeSpecName "kube-api-access-5gnll". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.792643 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53470168-a7a9-4617-83d5-3acbbd4467e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "53470168-a7a9-4617-83d5-3acbbd4467e5" (UID: "53470168-a7a9-4617-83d5-3acbbd4467e5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.813443 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53470168-a7a9-4617-83d5-3acbbd4467e5-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.813493 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gnll\" (UniqueName: \"kubernetes.io/projected/53470168-a7a9-4617-83d5-3acbbd4467e5-kube-api-access-5gnll\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.985771 4941 generic.go:334] "Generic (PLEG): container finished" podID="53470168-a7a9-4617-83d5-3acbbd4467e5" containerID="fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29" exitCode=0 Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.985864 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s49t5" Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.985891 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s49t5" event={"ID":"53470168-a7a9-4617-83d5-3acbbd4467e5","Type":"ContainerDied","Data":"fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29"} Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.985959 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s49t5" event={"ID":"53470168-a7a9-4617-83d5-3acbbd4467e5","Type":"ContainerDied","Data":"df4879fc8a64b3a2092759f8aec3afad8930834a4b16c33238859f12017df7f4"} Nov 30 09:00:33 crc kubenswrapper[4941]: I1130 09:00:33.985992 4941 scope.go:117] "RemoveContainer" containerID="fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.031624 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s49t5"] Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.060628 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s49t5"] Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.074092 4941 scope.go:117] "RemoveContainer" containerID="4f0786e572ac9d793d95ecf71e24e36ecc1c296a9671068e78080ad447fe75c9" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.134128 4941 scope.go:117] "RemoveContainer" containerID="baf545c5c5ad48d8bd08657ce3961f4e5d8db7d72b20fec416c9370d7fb3e350" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.169924 4941 scope.go:117] "RemoveContainer" containerID="fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29" Nov 30 09:00:34 crc kubenswrapper[4941]: E1130 09:00:34.170668 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29\": container with ID starting with fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29 not found: ID does not exist" containerID="fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.170725 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29"} err="failed to get container status 
\"fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29\": rpc error: code = NotFound desc = could not find container \"fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29\": container with ID starting with fe52e9ea6f9c5298a4d23c4543a3491bbd7321e390d19cb274119692d04d9c29 not found: ID does not exist" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.170760 4941 scope.go:117] "RemoveContainer" containerID="4f0786e572ac9d793d95ecf71e24e36ecc1c296a9671068e78080ad447fe75c9" Nov 30 09:00:34 crc kubenswrapper[4941]: E1130 09:00:34.171203 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f0786e572ac9d793d95ecf71e24e36ecc1c296a9671068e78080ad447fe75c9\": container with ID starting with 4f0786e572ac9d793d95ecf71e24e36ecc1c296a9671068e78080ad447fe75c9 not found: ID does not exist" containerID="4f0786e572ac9d793d95ecf71e24e36ecc1c296a9671068e78080ad447fe75c9" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.171260 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f0786e572ac9d793d95ecf71e24e36ecc1c296a9671068e78080ad447fe75c9"} err="failed to get container status \"4f0786e572ac9d793d95ecf71e24e36ecc1c296a9671068e78080ad447fe75c9\": rpc error: code = NotFound desc = could not find container \"4f0786e572ac9d793d95ecf71e24e36ecc1c296a9671068e78080ad447fe75c9\": container with ID starting with 4f0786e572ac9d793d95ecf71e24e36ecc1c296a9671068e78080ad447fe75c9 not found: ID does not exist" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.171297 4941 scope.go:117] "RemoveContainer" containerID="baf545c5c5ad48d8bd08657ce3961f4e5d8db7d72b20fec416c9370d7fb3e350" Nov 30 09:00:34 crc kubenswrapper[4941]: E1130 09:00:34.171620 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"baf545c5c5ad48d8bd08657ce3961f4e5d8db7d72b20fec416c9370d7fb3e350\": container with ID starting with baf545c5c5ad48d8bd08657ce3961f4e5d8db7d72b20fec416c9370d7fb3e350 not found: ID does not exist" containerID="baf545c5c5ad48d8bd08657ce3961f4e5d8db7d72b20fec416c9370d7fb3e350" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.171636 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baf545c5c5ad48d8bd08657ce3961f4e5d8db7d72b20fec416c9370d7fb3e350"} err="failed to get container status \"baf545c5c5ad48d8bd08657ce3961f4e5d8db7d72b20fec416c9370d7fb3e350\": rpc error: code = NotFound desc = could not find container \"baf545c5c5ad48d8bd08657ce3961f4e5d8db7d72b20fec416c9370d7fb3e350\": container with ID starting with baf545c5c5ad48d8bd08657ce3961f4e5d8db7d72b20fec416c9370d7fb3e350 not found: ID does not exist" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.620748 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.733103 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-neutron-sriov-combined-ca-bundle\") pod \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.733226 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-inventory\") pod \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.733312 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-neutron-sriov-agent-neutron-config-0\") pod \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.733421 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-ceph\") pod \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.733477 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-ssh-key\") pod \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.733730 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dz8h9\" (UniqueName: \"kubernetes.io/projected/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-kube-api-access-dz8h9\") pod \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\" (UID: \"34bc5f99-c870-4bc0-9873-b87d4f3fa30a\") " Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.754949 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-ceph" (OuterVolumeSpecName: "ceph") pod "34bc5f99-c870-4bc0-9873-b87d4f3fa30a" (UID: "34bc5f99-c870-4bc0-9873-b87d4f3fa30a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.754963 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "34bc5f99-c870-4bc0-9873-b87d4f3fa30a" (UID: "34bc5f99-c870-4bc0-9873-b87d4f3fa30a"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.755002 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-kube-api-access-dz8h9" (OuterVolumeSpecName: "kube-api-access-dz8h9") pod "34bc5f99-c870-4bc0-9873-b87d4f3fa30a" (UID: "34bc5f99-c870-4bc0-9873-b87d4f3fa30a"). InnerVolumeSpecName "kube-api-access-dz8h9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.786467 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "34bc5f99-c870-4bc0-9873-b87d4f3fa30a" (UID: "34bc5f99-c870-4bc0-9873-b87d4f3fa30a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.799545 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "34bc5f99-c870-4bc0-9873-b87d4f3fa30a" (UID: "34bc5f99-c870-4bc0-9873-b87d4f3fa30a"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.800713 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-inventory" (OuterVolumeSpecName: "inventory") pod "34bc5f99-c870-4bc0-9873-b87d4f3fa30a" (UID: "34bc5f99-c870-4bc0-9873-b87d4f3fa30a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.836822 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.836875 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dz8h9\" (UniqueName: \"kubernetes.io/projected/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-kube-api-access-dz8h9\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.836891 4941 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.836906 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.836917 4941 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.836926 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/34bc5f99-c870-4bc0-9873-b87d4f3fa30a-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.998922 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.998901 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-cvf6p" event={"ID":"34bc5f99-c870-4bc0-9873-b87d4f3fa30a","Type":"ContainerDied","Data":"65d3c32c4d8e58227c92052fd24087a0f54ccc2d1877351f38dc83fe4bbdc15b"} Nov 30 09:00:34 crc kubenswrapper[4941]: I1130 09:00:34.999558 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65d3c32c4d8e58227c92052fd24087a0f54ccc2d1877351f38dc83fe4bbdc15b" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.266756 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-7llxw"] Nov 30 09:00:35 crc kubenswrapper[4941]: E1130 09:00:35.270581 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53470168-a7a9-4617-83d5-3acbbd4467e5" containerName="extract-content" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.270609 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="53470168-a7a9-4617-83d5-3acbbd4467e5" containerName="extract-content" Nov 30 09:00:35 crc kubenswrapper[4941]: E1130 09:00:35.270790 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34bc5f99-c870-4bc0-9873-b87d4f3fa30a" containerName="neutron-sriov-openstack-openstack-cell1" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.270804 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="34bc5f99-c870-4bc0-9873-b87d4f3fa30a" containerName="neutron-sriov-openstack-openstack-cell1" Nov 30 09:00:35 crc kubenswrapper[4941]: E1130 09:00:35.270833 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aac813b8-825e-435d-b3e3-a2a9319b559b" containerName="registry-server" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.270848 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="aac813b8-825e-435d-b3e3-a2a9319b559b" containerName="registry-server" Nov 30 09:00:35 crc kubenswrapper[4941]: E1130 09:00:35.270872 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aac813b8-825e-435d-b3e3-a2a9319b559b" containerName="extract-utilities" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.270882 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="aac813b8-825e-435d-b3e3-a2a9319b559b" containerName="extract-utilities" Nov 30 09:00:35 crc kubenswrapper[4941]: E1130 09:00:35.270900 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53470168-a7a9-4617-83d5-3acbbd4467e5" containerName="extract-utilities" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.270909 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="53470168-a7a9-4617-83d5-3acbbd4467e5" containerName="extract-utilities" Nov 30 09:00:35 crc kubenswrapper[4941]: E1130 09:00:35.270927 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53470168-a7a9-4617-83d5-3acbbd4467e5" containerName="registry-server" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.270936 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="53470168-a7a9-4617-83d5-3acbbd4467e5" containerName="registry-server" Nov 30 09:00:35 crc kubenswrapper[4941]: E1130 09:00:35.270962 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aac813b8-825e-435d-b3e3-a2a9319b559b" containerName="extract-content" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.270972 4941 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="aac813b8-825e-435d-b3e3-a2a9319b559b" containerName="extract-content" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.274031 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="34bc5f99-c870-4bc0-9873-b87d4f3fa30a" containerName="neutron-sriov-openstack-openstack-cell1" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.274093 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="53470168-a7a9-4617-83d5-3acbbd4467e5" containerName="registry-server" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.274133 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="aac813b8-825e-435d-b3e3-a2a9319b559b" containerName="registry-server" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.276892 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.282371 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.283232 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.283418 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.283915 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.285684 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.341980 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-7llxw"] Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.349229 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.349896 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.350256 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.350584 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjskj\" 
(UniqueName: \"kubernetes.io/projected/2cc0b49d-63a4-4376-87d3-9866a06212e7-kube-api-access-vjskj\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.351064 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.351200 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.453123 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.453210 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.453270 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.453304 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjskj\" (UniqueName: \"kubernetes.io/projected/2cc0b49d-63a4-4376-87d3-9866a06212e7-kube-api-access-vjskj\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.453407 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.453440 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" 
(UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.459160 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.463967 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.467425 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.469891 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.470244 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.477125 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjskj\" (UniqueName: \"kubernetes.io/projected/2cc0b49d-63a4-4376-87d3-9866a06212e7-kube-api-access-vjskj\") pod \"neutron-dhcp-openstack-openstack-cell1-7llxw\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.534001 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53470168-a7a9-4617-83d5-3acbbd4467e5" path="/var/lib/kubelet/pods/53470168-a7a9-4617-83d5-3acbbd4467e5/volumes" Nov 30 09:00:35 crc kubenswrapper[4941]: I1130 09:00:35.626776 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:00:36 crc kubenswrapper[4941]: I1130 09:00:36.258861 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-7llxw"] Nov 30 09:00:36 crc kubenswrapper[4941]: W1130 09:00:36.264144 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2cc0b49d_63a4_4376_87d3_9866a06212e7.slice/crio-29cba32a4d5215c3734823a0adcd1aa6c3665457e3e6ee6172bbb15aef97eccb WatchSource:0}: Error finding container 29cba32a4d5215c3734823a0adcd1aa6c3665457e3e6ee6172bbb15aef97eccb: Status 404 returned error can't find the container with id 29cba32a4d5215c3734823a0adcd1aa6c3665457e3e6ee6172bbb15aef97eccb Nov 30 09:00:37 crc kubenswrapper[4941]: I1130 09:00:37.026291 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" event={"ID":"2cc0b49d-63a4-4376-87d3-9866a06212e7","Type":"ContainerStarted","Data":"29cba32a4d5215c3734823a0adcd1aa6c3665457e3e6ee6172bbb15aef97eccb"} Nov 30 09:00:38 crc kubenswrapper[4941]: I1130 09:00:38.044442 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" event={"ID":"2cc0b49d-63a4-4376-87d3-9866a06212e7","Type":"ContainerStarted","Data":"f435416a6bedf7a3bb79e9850d39c8afc53004834825c24a9d6e90733048e5b6"} Nov 30 09:00:53 crc kubenswrapper[4941]: I1130 09:00:53.089840 4941 scope.go:117] "RemoveContainer" containerID="71ebd700bebc680bcf6341a369cb2f1e7bcf789a3d9c1fc3d9c17d560fadc25f" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.158715 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" podStartSLOduration=24.705212346 podStartE2EDuration="25.15868632s" podCreationTimestamp="2025-11-30 09:00:35 +0000 UTC" firstStartedPulling="2025-11-30 09:00:36.270475222 +0000 UTC m=+8057.038646831" lastFinishedPulling="2025-11-30 09:00:36.723949196 +0000 UTC m=+8057.492120805" observedRunningTime="2025-11-30 09:00:38.07953913 +0000 UTC m=+8058.847710779" watchObservedRunningTime="2025-11-30 09:01:00.15868632 +0000 UTC m=+8080.926857939" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.162531 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29408221-spdm4"] Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.164444 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.179590 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29408221-spdm4"] Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.250596 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-fernet-keys\") pod \"keystone-cron-29408221-spdm4\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.250792 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-config-data\") pod \"keystone-cron-29408221-spdm4\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.250850 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6nzp\" (UniqueName: \"kubernetes.io/projected/90448577-f8e3-4277-923b-50d69fc2273e-kube-api-access-p6nzp\") pod \"keystone-cron-29408221-spdm4\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.250957 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-combined-ca-bundle\") pod \"keystone-cron-29408221-spdm4\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.353035 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-config-data\") pod \"keystone-cron-29408221-spdm4\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.353502 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6nzp\" (UniqueName: \"kubernetes.io/projected/90448577-f8e3-4277-923b-50d69fc2273e-kube-api-access-p6nzp\") pod \"keystone-cron-29408221-spdm4\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.353575 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-combined-ca-bundle\") pod \"keystone-cron-29408221-spdm4\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.353637 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-fernet-keys\") pod \"keystone-cron-29408221-spdm4\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.362476 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-fernet-keys\") pod \"keystone-cron-29408221-spdm4\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.362499 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-config-data\") pod \"keystone-cron-29408221-spdm4\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.363075 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-combined-ca-bundle\") pod \"keystone-cron-29408221-spdm4\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.372479 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6nzp\" (UniqueName: \"kubernetes.io/projected/90448577-f8e3-4277-923b-50d69fc2273e-kube-api-access-p6nzp\") pod \"keystone-cron-29408221-spdm4\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.498514 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:00 crc kubenswrapper[4941]: I1130 09:01:00.991698 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29408221-spdm4"] Nov 30 09:01:01 crc kubenswrapper[4941]: I1130 09:01:01.390409 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29408221-spdm4" event={"ID":"90448577-f8e3-4277-923b-50d69fc2273e","Type":"ContainerStarted","Data":"8223dcc6aebaddf90ad5f4261851f95f8f61bdad6cb674bccb3e1e017adbc67d"} Nov 30 09:01:01 crc kubenswrapper[4941]: I1130 09:01:01.390904 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29408221-spdm4" event={"ID":"90448577-f8e3-4277-923b-50d69fc2273e","Type":"ContainerStarted","Data":"673ad9e00434398fd86c8fb4b04ac4ea9d308438626b704d11f530aa84ea5bf9"} Nov 30 09:01:01 crc kubenswrapper[4941]: I1130 09:01:01.420315 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29408221-spdm4" podStartSLOduration=1.4202883 podStartE2EDuration="1.4202883s" podCreationTimestamp="2025-11-30 09:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 09:01:01.408361381 +0000 UTC m=+8082.176533000" watchObservedRunningTime="2025-11-30 09:01:01.4202883 +0000 UTC m=+8082.188459909" Nov 30 09:01:04 crc kubenswrapper[4941]: I1130 09:01:04.428718 4941 generic.go:334] "Generic (PLEG): container finished" podID="90448577-f8e3-4277-923b-50d69fc2273e" containerID="8223dcc6aebaddf90ad5f4261851f95f8f61bdad6cb674bccb3e1e017adbc67d" exitCode=0 Nov 30 09:01:04 crc kubenswrapper[4941]: I1130 09:01:04.428807 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29408221-spdm4" event={"ID":"90448577-f8e3-4277-923b-50d69fc2273e","Type":"ContainerDied","Data":"8223dcc6aebaddf90ad5f4261851f95f8f61bdad6cb674bccb3e1e017adbc67d"} Nov 30 09:01:05 crc kubenswrapper[4941]: I1130 
09:01:05.954845 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.013670 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-combined-ca-bundle\") pod \"90448577-f8e3-4277-923b-50d69fc2273e\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.013979 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-fernet-keys\") pod \"90448577-f8e3-4277-923b-50d69fc2273e\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.014074 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-config-data\") pod \"90448577-f8e3-4277-923b-50d69fc2273e\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.014175 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6nzp\" (UniqueName: \"kubernetes.io/projected/90448577-f8e3-4277-923b-50d69fc2273e-kube-api-access-p6nzp\") pod \"90448577-f8e3-4277-923b-50d69fc2273e\" (UID: \"90448577-f8e3-4277-923b-50d69fc2273e\") " Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.035940 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "90448577-f8e3-4277-923b-50d69fc2273e" (UID: "90448577-f8e3-4277-923b-50d69fc2273e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.036112 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90448577-f8e3-4277-923b-50d69fc2273e-kube-api-access-p6nzp" (OuterVolumeSpecName: "kube-api-access-p6nzp") pod "90448577-f8e3-4277-923b-50d69fc2273e" (UID: "90448577-f8e3-4277-923b-50d69fc2273e"). InnerVolumeSpecName "kube-api-access-p6nzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.056143 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "90448577-f8e3-4277-923b-50d69fc2273e" (UID: "90448577-f8e3-4277-923b-50d69fc2273e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.086966 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-config-data" (OuterVolumeSpecName: "config-data") pod "90448577-f8e3-4277-923b-50d69fc2273e" (UID: "90448577-f8e3-4277-923b-50d69fc2273e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.117265 4941 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.117359 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.117370 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6nzp\" (UniqueName: \"kubernetes.io/projected/90448577-f8e3-4277-923b-50d69fc2273e-kube-api-access-p6nzp\") on node \"crc\" DevicePath \"\"" Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.117381 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90448577-f8e3-4277-923b-50d69fc2273e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.455107 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29408221-spdm4" event={"ID":"90448577-f8e3-4277-923b-50d69fc2273e","Type":"ContainerDied","Data":"673ad9e00434398fd86c8fb4b04ac4ea9d308438626b704d11f530aa84ea5bf9"} Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.455155 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="673ad9e00434398fd86c8fb4b04ac4ea9d308438626b704d11f530aa84ea5bf9" Nov 30 09:01:06 crc kubenswrapper[4941]: I1130 09:01:06.455384 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29408221-spdm4" Nov 30 09:01:32 crc kubenswrapper[4941]: I1130 09:01:32.978374 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 09:01:32 crc kubenswrapper[4941]: I1130 09:01:32.979179 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 09:02:02 crc kubenswrapper[4941]: I1130 09:02:02.978729 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 09:02:02 crc kubenswrapper[4941]: I1130 09:02:02.979713 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 09:02:32 crc kubenswrapper[4941]: I1130 09:02:32.978714 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: 
Nov 30 09:02:32 crc kubenswrapper[4941]: I1130 09:02:32.978714 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 09:02:32 crc kubenswrapper[4941]: I1130 09:02:32.979583 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 09:02:32 crc kubenswrapper[4941]: I1130 09:02:32.979650 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 09:02:32 crc kubenswrapper[4941]: I1130 09:02:32.981072 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 09:02:32 crc kubenswrapper[4941]: I1130 09:02:32.981154 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162" gracePeriod=600
Nov 30 09:02:33 crc kubenswrapper[4941]: E1130 09:02:33.101462 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:02:33 crc kubenswrapper[4941]: I1130 09:02:33.658914 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162" exitCode=0
Nov 30 09:02:33 crc kubenswrapper[4941]: I1130 09:02:33.659110 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"}
Nov 30 09:02:33 crc kubenswrapper[4941]: I1130 09:02:33.659392 4941 scope.go:117] "RemoveContainer" containerID="f6761dbd1bbcf1b92940c5b8baec59e6668797156eef4bf7a2a454a505d37305"
Nov 30 09:02:33 crc kubenswrapper[4941]: I1130 09:02:33.660549 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:02:33 crc kubenswrapper[4941]: E1130 09:02:33.661041 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:02:44 crc kubenswrapper[4941]: I1130 09:02:44.522728 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:02:44 crc kubenswrapper[4941]: E1130 09:02:44.524219 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:02:59 crc kubenswrapper[4941]: I1130 09:02:59.535949 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:02:59 crc kubenswrapper[4941]: E1130 09:02:59.537079 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:03:11 crc kubenswrapper[4941]: I1130 09:03:11.523090 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:03:11 crc kubenswrapper[4941]: E1130 09:03:11.524459 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:03:24 crc kubenswrapper[4941]: I1130 09:03:24.523298 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:03:24 crc kubenswrapper[4941]: E1130 09:03:24.525739 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:03:39 crc kubenswrapper[4941]: I1130 09:03:39.541916 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:03:39 crc kubenswrapper[4941]: E1130 09:03:39.543651 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:03:53 crc kubenswrapper[4941]: I1130 09:03:53.524010 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:03:53 crc kubenswrapper[4941]: E1130 09:03:53.525806 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:04:06 crc kubenswrapper[4941]: I1130 09:04:06.523208 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:04:06 crc kubenswrapper[4941]: E1130 09:04:06.527461 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.725760 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jwd8q"]
Nov 30 09:04:18 crc kubenswrapper[4941]: E1130 09:04:18.727100 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90448577-f8e3-4277-923b-50d69fc2273e" containerName="keystone-cron"
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.727116 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="90448577-f8e3-4277-923b-50d69fc2273e" containerName="keystone-cron"
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.727400 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="90448577-f8e3-4277-923b-50d69fc2273e" containerName="keystone-cron"
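The RemoveContainer attempts above land 12-15 seconds apart because each pod sync retries while the restart back-off is in force; the "back-off 5m0s" in the error text means this container has already reached kubelet's cap. A minimal sketch of the progression, assuming the defaults kubelet ships with (10s initial back-off, doubled per restart, capped at 5m); an illustration, not kubelet's actual code:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed defaults: 10s initial back-off, doubled after each crash,
	// capped at 5m (the "back-off 5m0s" seen in the log above).
	backoff := 10 * time.Second
	const maxBackoff = 5 * time.Minute
	for restart := 1; restart <= 7; restart++ {
		fmt.Printf("restart %d: wait %v\n", restart, backoff)
		backoff *= 2
		if backoff > maxBackoff {
			backoff = maxBackoff
		}
	}
	// Prints 10s, 20s, 40s, 1m20s, 2m40s, 5m0s, 5m0s: from the sixth
	// restart onward the container stays in CrashLoopBackOff at the cap.
}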
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.731867 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.737914 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jwd8q"]
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.839251 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/670f3a59-54e6-4bd2-a8fc-a74be681bae4-utilities\") pod \"community-operators-jwd8q\" (UID: \"670f3a59-54e6-4bd2-a8fc-a74be681bae4\") " pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.839465 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr5sq\" (UniqueName: \"kubernetes.io/projected/670f3a59-54e6-4bd2-a8fc-a74be681bae4-kube-api-access-dr5sq\") pod \"community-operators-jwd8q\" (UID: \"670f3a59-54e6-4bd2-a8fc-a74be681bae4\") " pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.839787 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/670f3a59-54e6-4bd2-a8fc-a74be681bae4-catalog-content\") pod \"community-operators-jwd8q\" (UID: \"670f3a59-54e6-4bd2-a8fc-a74be681bae4\") " pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.943255 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/670f3a59-54e6-4bd2-a8fc-a74be681bae4-catalog-content\") pod \"community-operators-jwd8q\" (UID: \"670f3a59-54e6-4bd2-a8fc-a74be681bae4\") " pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.943496 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/670f3a59-54e6-4bd2-a8fc-a74be681bae4-utilities\") pod \"community-operators-jwd8q\" (UID: \"670f3a59-54e6-4bd2-a8fc-a74be681bae4\") " pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.943562 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr5sq\" (UniqueName: \"kubernetes.io/projected/670f3a59-54e6-4bd2-a8fc-a74be681bae4-kube-api-access-dr5sq\") pod \"community-operators-jwd8q\" (UID: \"670f3a59-54e6-4bd2-a8fc-a74be681bae4\") " pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.943966 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/670f3a59-54e6-4bd2-a8fc-a74be681bae4-catalog-content\") pod \"community-operators-jwd8q\" (UID: \"670f3a59-54e6-4bd2-a8fc-a74be681bae4\") " pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.944152 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/670f3a59-54e6-4bd2-a8fc-a74be681bae4-utilities\") pod \"community-operators-jwd8q\" (UID: \"670f3a59-54e6-4bd2-a8fc-a74be681bae4\") " pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:18 crc kubenswrapper[4941]: I1130 09:04:18.968275 4941 operation_generator.go:637]
"MountVolume.SetUp succeeded for volume \"kube-api-access-dr5sq\" (UniqueName: \"kubernetes.io/projected/670f3a59-54e6-4bd2-a8fc-a74be681bae4-kube-api-access-dr5sq\") pod \"community-operators-jwd8q\" (UID: \"670f3a59-54e6-4bd2-a8fc-a74be681bae4\") " pod="openshift-marketplace/community-operators-jwd8q" Nov 30 09:04:19 crc kubenswrapper[4941]: I1130 09:04:19.069418 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jwd8q" Nov 30 09:04:19 crc kubenswrapper[4941]: I1130 09:04:19.523224 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162" Nov 30 09:04:19 crc kubenswrapper[4941]: E1130 09:04:19.526219 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:04:19 crc kubenswrapper[4941]: I1130 09:04:19.693811 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jwd8q"] Nov 30 09:04:20 crc kubenswrapper[4941]: I1130 09:04:20.154571 4941 generic.go:334] "Generic (PLEG): container finished" podID="670f3a59-54e6-4bd2-a8fc-a74be681bae4" containerID="e487e9f98bb1d56ed016b0ae4a92e50a9322ad483e298169145f687f9a24419b" exitCode=0 Nov 30 09:04:20 crc kubenswrapper[4941]: I1130 09:04:20.154671 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jwd8q" event={"ID":"670f3a59-54e6-4bd2-a8fc-a74be681bae4","Type":"ContainerDied","Data":"e487e9f98bb1d56ed016b0ae4a92e50a9322ad483e298169145f687f9a24419b"} Nov 30 09:04:20 crc kubenswrapper[4941]: I1130 09:04:20.154731 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jwd8q" event={"ID":"670f3a59-54e6-4bd2-a8fc-a74be681bae4","Type":"ContainerStarted","Data":"56299037f97139b3f25fd2f683abb537a16646318f89fab233bf40379b61a45b"} Nov 30 09:04:20 crc kubenswrapper[4941]: I1130 09:04:20.159607 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 09:04:24 crc kubenswrapper[4941]: I1130 09:04:24.230767 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jwd8q" event={"ID":"670f3a59-54e6-4bd2-a8fc-a74be681bae4","Type":"ContainerStarted","Data":"6d26fbb3b405c4d82a3dddfb1cbc3d15115f1ebdb6341dd312c132db291de72f"} Nov 30 09:04:25 crc kubenswrapper[4941]: I1130 09:04:25.246495 4941 generic.go:334] "Generic (PLEG): container finished" podID="670f3a59-54e6-4bd2-a8fc-a74be681bae4" containerID="6d26fbb3b405c4d82a3dddfb1cbc3d15115f1ebdb6341dd312c132db291de72f" exitCode=0 Nov 30 09:04:25 crc kubenswrapper[4941]: I1130 09:04:25.246557 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jwd8q" event={"ID":"670f3a59-54e6-4bd2-a8fc-a74be681bae4","Type":"ContainerDied","Data":"6d26fbb3b405c4d82a3dddfb1cbc3d15115f1ebdb6341dd312c132db291de72f"} Nov 30 09:04:26 crc kubenswrapper[4941]: I1130 09:04:26.291169 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jwd8q" 
event={"ID":"670f3a59-54e6-4bd2-a8fc-a74be681bae4","Type":"ContainerStarted","Data":"7637a884c02c138ae9615d35fbb7fdc2db066b009987dfd7011da6d10cd886d7"}
Nov 30 09:04:26 crc kubenswrapper[4941]: I1130 09:04:26.329534 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jwd8q" podStartSLOduration=2.531121746 podStartE2EDuration="8.329505947s" podCreationTimestamp="2025-11-30 09:04:18 +0000 UTC" firstStartedPulling="2025-11-30 09:04:20.159030161 +0000 UTC m=+8280.927201810" lastFinishedPulling="2025-11-30 09:04:25.957414402 +0000 UTC m=+8286.725586011" observedRunningTime="2025-11-30 09:04:26.311571882 +0000 UTC m=+8287.079743491" watchObservedRunningTime="2025-11-30 09:04:26.329505947 +0000 UTC m=+8287.097677546"
Nov 30 09:04:29 crc kubenswrapper[4941]: I1130 09:04:29.069834 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:29 crc kubenswrapper[4941]: I1130 09:04:29.071492 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:29 crc kubenswrapper[4941]: I1130 09:04:29.128592 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:33 crc kubenswrapper[4941]: I1130 09:04:33.522499 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:04:33 crc kubenswrapper[4941]: E1130 09:04:33.523201 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:04:39 crc kubenswrapper[4941]: I1130 09:04:39.171211 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jwd8q"
Nov 30 09:04:39 crc kubenswrapper[4941]: I1130 09:04:39.283794 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jwd8q"]
Nov 30 09:04:39 crc kubenswrapper[4941]: I1130 09:04:39.336732 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xxv9s"]
Nov 30 09:04:39 crc kubenswrapper[4941]: I1130 09:04:39.337555 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xxv9s" podUID="56cf97ff-5196-4975-909b-a838177567ee" containerName="registry-server" containerID="cri-o://7dcaf837bd85984d992004522ef17a00cf1538973725e0d3c4e4d3b7e55495f1" gracePeriod=2
Nov 30 09:04:39 crc kubenswrapper[4941]: I1130 09:04:39.522572 4941 generic.go:334] "Generic (PLEG): container finished" podID="56cf97ff-5196-4975-909b-a838177567ee" containerID="7dcaf837bd85984d992004522ef17a00cf1538973725e0d3c4e4d3b7e55495f1" exitCode=0
Nov 30 09:04:39 crc kubenswrapper[4941]: I1130 09:04:39.546187 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xxv9s" event={"ID":"56cf97ff-5196-4975-909b-a838177567ee","Type":"ContainerDied","Data":"7dcaf837bd85984d992004522ef17a00cf1538973725e0d3c4e4d3b7e55495f1"}
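The pod_startup_latency_tracker record above decomposes cleanly: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration is that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling). A quick check using the monotonic (m=+...) offsets copied from the record itself:

package main

import "fmt"

func main() {
	// Monotonic offsets, in seconds, copied from the tracker record above.
	const (
		firstStartedPulling = 8280.927201810
		lastFinishedPulling = 8286.725586011
		podStartE2E         = 8.329505947 // observedRunningTime - podCreationTimestamp
	)
	pull := lastFinishedPulling - firstStartedPulling
	slo := podStartE2E - pull
	fmt.Printf("image pull window: %.9fs\n", pull) // ~5.798384201s
	fmt.Printf("podStartSLOduration: %.9f\n", slo) // ~2.531121746, matching the log
}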
Nov 30 09:04:39 crc kubenswrapper[4941]: I1130 09:04:39.900736 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xxv9s"
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.006718 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56cf97ff-5196-4975-909b-a838177567ee-utilities\") pod \"56cf97ff-5196-4975-909b-a838177567ee\" (UID: \"56cf97ff-5196-4975-909b-a838177567ee\") "
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.006832 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2r92w\" (UniqueName: \"kubernetes.io/projected/56cf97ff-5196-4975-909b-a838177567ee-kube-api-access-2r92w\") pod \"56cf97ff-5196-4975-909b-a838177567ee\" (UID: \"56cf97ff-5196-4975-909b-a838177567ee\") "
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.006870 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56cf97ff-5196-4975-909b-a838177567ee-catalog-content\") pod \"56cf97ff-5196-4975-909b-a838177567ee\" (UID: \"56cf97ff-5196-4975-909b-a838177567ee\") "
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.008071 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56cf97ff-5196-4975-909b-a838177567ee-utilities" (OuterVolumeSpecName: "utilities") pod "56cf97ff-5196-4975-909b-a838177567ee" (UID: "56cf97ff-5196-4975-909b-a838177567ee"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.017578 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56cf97ff-5196-4975-909b-a838177567ee-kube-api-access-2r92w" (OuterVolumeSpecName: "kube-api-access-2r92w") pod "56cf97ff-5196-4975-909b-a838177567ee" (UID: "56cf97ff-5196-4975-909b-a838177567ee"). InnerVolumeSpecName "kube-api-access-2r92w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.086459 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56cf97ff-5196-4975-909b-a838177567ee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56cf97ff-5196-4975-909b-a838177567ee" (UID: "56cf97ff-5196-4975-909b-a838177567ee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.109995 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56cf97ff-5196-4975-909b-a838177567ee-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.110243 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2r92w\" (UniqueName: \"kubernetes.io/projected/56cf97ff-5196-4975-909b-a838177567ee-kube-api-access-2r92w\") on node \"crc\" DevicePath \"\""
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.110363 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56cf97ff-5196-4975-909b-a838177567ee-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.539293 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xxv9s" event={"ID":"56cf97ff-5196-4975-909b-a838177567ee","Type":"ContainerDied","Data":"5c8a4c2630a1160373c5e0352ecad17075aa79cad516131bec1f3903fcf91320"}
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.539381 4941 scope.go:117] "RemoveContainer" containerID="7dcaf837bd85984d992004522ef17a00cf1538973725e0d3c4e4d3b7e55495f1"
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.539616 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xxv9s"
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.582946 4941 scope.go:117] "RemoveContainer" containerID="1dfa34af432e9922d2f165ebdf1e46105b476e0a0bc9ac521fb30723f9ff4b13"
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.623383 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xxv9s"]
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.634075 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xxv9s"]
Nov 30 09:04:40 crc kubenswrapper[4941]: I1130 09:04:40.638487 4941 scope.go:117] "RemoveContainer" containerID="9907603cc8ca065b78847c064ea00df0ff7f849eff710e7a2dfaf0e370326c8b"
Nov 30 09:04:41 crc kubenswrapper[4941]: I1130 09:04:41.544645 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56cf97ff-5196-4975-909b-a838177567ee" path="/var/lib/kubelet/pods/56cf97ff-5196-4975-909b-a838177567ee/volumes"
Nov 30 09:04:45 crc kubenswrapper[4941]: I1130 09:04:45.522455 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:04:45 crc kubenswrapper[4941]: E1130 09:04:45.523629 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:04:58 crc kubenswrapper[4941]: I1130 09:04:58.522986 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
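Every pod teardown in this log follows the same four-step pattern: UnmountVolume started (reconciler_common.go:159), UnmountVolume.TearDown succeeded (operation_generator.go:803), Volume detached (reconciler_common.go:293), and finally the orphaned volumes dir cleanup (kubelet_volumes.go:163). A compressed sketch of that control flow; the types and helpers below are hypothetical stand-ins for illustration, not kubelet's actual API:

package main

import "fmt"

// volume is a hypothetical stand-in for the reconciler's mounted-volume record.
type volume struct{ name, plugin string }

// tearDown stands in for the plugin-specific unmount; for the emptyDir and
// projected volumes in the log above this never blocks on external storage.
func tearDown(v volume) error {
	fmt.Printf("UnmountVolume.TearDown succeeded for %q (plugin %s)\n", v.name, v.plugin)
	return nil
}

func main() {
	vols := []volume{
		{"utilities", "kubernetes.io/empty-dir"},
		{"kube-api-access-2r92w", "kubernetes.io/projected"},
		{"catalog-content", "kubernetes.io/empty-dir"},
	}
	for _, v := range vols {
		fmt.Printf("UnmountVolume started for %q\n", v.name)
		if err := tearDown(v); err != nil {
			continue // retried on the next reconciler pass
		}
		// Only after TearDown succeeds is the volume recorded as detached,
		// which is what gates the final pod volumes dir cleanup.
		fmt.Printf("Volume detached for %q\n", v.name)
	}
	fmt.Println("Cleaned up orphaned pod volumes dir")
}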
Nov 30 09:04:58 crc kubenswrapper[4941]: E1130 09:04:58.524230 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:05:13 crc kubenswrapper[4941]: I1130 09:05:13.522866 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:05:13 crc kubenswrapper[4941]: E1130 09:05:13.525640 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:05:26 crc kubenswrapper[4941]: I1130 09:05:26.523098 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:05:26 crc kubenswrapper[4941]: E1130 09:05:26.524060 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:05:40 crc kubenswrapper[4941]: I1130 09:05:40.522741 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:05:40 crc kubenswrapper[4941]: E1130 09:05:40.524571 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:05:44 crc kubenswrapper[4941]: I1130 09:05:44.429014 4941 generic.go:334] "Generic (PLEG): container finished" podID="2cc0b49d-63a4-4376-87d3-9866a06212e7" containerID="f435416a6bedf7a3bb79e9850d39c8afc53004834825c24a9d6e90733048e5b6" exitCode=0
Nov 30 09:05:44 crc kubenswrapper[4941]: I1130 09:05:44.429156 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" event={"ID":"2cc0b49d-63a4-4376-87d3-9866a06212e7","Type":"ContainerDied","Data":"f435416a6bedf7a3bb79e9850d39c8afc53004834825c24a9d6e90733048e5b6"}
Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.025688 4941 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.057652 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-ssh-key\") pod \"2cc0b49d-63a4-4376-87d3-9866a06212e7\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.058424 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-ceph\") pod \"2cc0b49d-63a4-4376-87d3-9866a06212e7\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.058624 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-neutron-dhcp-combined-ca-bundle\") pod \"2cc0b49d-63a4-4376-87d3-9866a06212e7\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.058720 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-neutron-dhcp-agent-neutron-config-0\") pod \"2cc0b49d-63a4-4376-87d3-9866a06212e7\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.058847 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-inventory\") pod \"2cc0b49d-63a4-4376-87d3-9866a06212e7\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.059657 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjskj\" (UniqueName: \"kubernetes.io/projected/2cc0b49d-63a4-4376-87d3-9866a06212e7-kube-api-access-vjskj\") pod \"2cc0b49d-63a4-4376-87d3-9866a06212e7\" (UID: \"2cc0b49d-63a4-4376-87d3-9866a06212e7\") " Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.068582 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-ceph" (OuterVolumeSpecName: "ceph") pod "2cc0b49d-63a4-4376-87d3-9866a06212e7" (UID: "2cc0b49d-63a4-4376-87d3-9866a06212e7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.069500 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "2cc0b49d-63a4-4376-87d3-9866a06212e7" (UID: "2cc0b49d-63a4-4376-87d3-9866a06212e7"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.069987 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cc0b49d-63a4-4376-87d3-9866a06212e7-kube-api-access-vjskj" (OuterVolumeSpecName: "kube-api-access-vjskj") pod "2cc0b49d-63a4-4376-87d3-9866a06212e7" (UID: "2cc0b49d-63a4-4376-87d3-9866a06212e7"). InnerVolumeSpecName "kube-api-access-vjskj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.093110 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2cc0b49d-63a4-4376-87d3-9866a06212e7" (UID: "2cc0b49d-63a4-4376-87d3-9866a06212e7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.108274 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "2cc0b49d-63a4-4376-87d3-9866a06212e7" (UID: "2cc0b49d-63a4-4376-87d3-9866a06212e7"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.110203 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-inventory" (OuterVolumeSpecName: "inventory") pod "2cc0b49d-63a4-4376-87d3-9866a06212e7" (UID: "2cc0b49d-63a4-4376-87d3-9866a06212e7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.165056 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.165141 4941 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.165167 4941 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.165191 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.165211 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjskj\" (UniqueName: \"kubernetes.io/projected/2cc0b49d-63a4-4376-87d3-9866a06212e7-kube-api-access-vjskj\") on node \"crc\" DevicePath \"\"" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.165232 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cc0b49d-63a4-4376-87d3-9866a06212e7-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.464563 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" event={"ID":"2cc0b49d-63a4-4376-87d3-9866a06212e7","Type":"ContainerDied","Data":"29cba32a4d5215c3734823a0adcd1aa6c3665457e3e6ee6172bbb15aef97eccb"} Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.464629 4941 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="29cba32a4d5215c3734823a0adcd1aa6c3665457e3e6ee6172bbb15aef97eccb" Nov 30 09:05:46 crc kubenswrapper[4941]: I1130 09:05:46.464680 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-7llxw" Nov 30 09:05:51 crc kubenswrapper[4941]: I1130 09:05:51.521819 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162" Nov 30 09:05:51 crc kubenswrapper[4941]: E1130 09:05:51.522804 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:06:01 crc kubenswrapper[4941]: I1130 09:06:01.237715 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 09:06:01 crc kubenswrapper[4941]: I1130 09:06:01.238647 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6" containerName="nova-cell0-conductor-conductor" containerID="cri-o://da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942" gracePeriod=30 Nov 30 09:06:01 crc kubenswrapper[4941]: I1130 09:06:01.300000 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 09:06:01 crc kubenswrapper[4941]: I1130 09:06:01.300676 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="b0f67deb-a45e-4c98-ae10-793d43722433" containerName="nova-cell1-conductor-conductor" containerID="cri-o://60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020" gracePeriod=30 Nov 30 09:06:01 crc kubenswrapper[4941]: E1130 09:06:01.624963 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 30 09:06:01 crc kubenswrapper[4941]: E1130 09:06:01.626988 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 30 09:06:01 crc kubenswrapper[4941]: E1130 09:06:01.628882 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 30 09:06:01 crc kubenswrapper[4941]: E1130 09:06:01.629158 4941 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6" 
containerName="nova-cell0-conductor-conductor" Nov 30 09:06:02 crc kubenswrapper[4941]: I1130 09:06:02.043585 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 09:06:02 crc kubenswrapper[4941]: I1130 09:06:02.043955 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cd10ed18-796a-4734-843c-9e2ae2973797" containerName="nova-api-log" containerID="cri-o://db663f0cce711c93efe883f85529d4ed3ff32c34a52b403b693e93a0a5794ae1" gracePeriod=30 Nov 30 09:06:02 crc kubenswrapper[4941]: I1130 09:06:02.044058 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cd10ed18-796a-4734-843c-9e2ae2973797" containerName="nova-api-api" containerID="cri-o://af07f6fcebb8548eeb4f1bdb32a12056f3c2f9352865b6cbdd49adb22ef4d104" gracePeriod=30 Nov 30 09:06:02 crc kubenswrapper[4941]: I1130 09:06:02.156347 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 09:06:02 crc kubenswrapper[4941]: I1130 09:06:02.156630 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="a356bfee-e892-474f-8f09-b179804dced1" containerName="nova-scheduler-scheduler" containerID="cri-o://ab9e48f3ad9dce33268571c7b0f0f0dd5019375ec245be8dd96210695e589c24" gracePeriod=30 Nov 30 09:06:02 crc kubenswrapper[4941]: I1130 09:06:02.187884 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 09:06:02 crc kubenswrapper[4941]: I1130 09:06:02.188235 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d7f3b443-db5d-4635-b028-9adf4c846090" containerName="nova-metadata-log" containerID="cri-o://403dcee0e54f75fc1e836211d7158dfd0dc692b8fbe61a00b31f70ec6f2f3c4d" gracePeriod=30 Nov 30 09:06:02 crc kubenswrapper[4941]: I1130 09:06:02.188815 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="d7f3b443-db5d-4635-b028-9adf4c846090" containerName="nova-metadata-metadata" containerID="cri-o://cfe8c2f296dd4667d96f5ff72fdb4c53478ea9c52589a33990517ef81a7b560a" gracePeriod=30 Nov 30 09:06:02 crc kubenswrapper[4941]: I1130 09:06:02.707670 4941 generic.go:334] "Generic (PLEG): container finished" podID="d7f3b443-db5d-4635-b028-9adf4c846090" containerID="403dcee0e54f75fc1e836211d7158dfd0dc692b8fbe61a00b31f70ec6f2f3c4d" exitCode=143 Nov 30 09:06:02 crc kubenswrapper[4941]: I1130 09:06:02.707755 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d7f3b443-db5d-4635-b028-9adf4c846090","Type":"ContainerDied","Data":"403dcee0e54f75fc1e836211d7158dfd0dc692b8fbe61a00b31f70ec6f2f3c4d"} Nov 30 09:06:02 crc kubenswrapper[4941]: I1130 09:06:02.711187 4941 generic.go:334] "Generic (PLEG): container finished" podID="cd10ed18-796a-4734-843c-9e2ae2973797" containerID="db663f0cce711c93efe883f85529d4ed3ff32c34a52b403b693e93a0a5794ae1" exitCode=143 Nov 30 09:06:02 crc kubenswrapper[4941]: I1130 09:06:02.711265 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cd10ed18-796a-4734-843c-9e2ae2973797","Type":"ContainerDied","Data":"db663f0cce711c93efe883f85529d4ed3ff32c34a52b403b693e93a0a5794ae1"} Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.502441 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.521753 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162" Nov 30 09:06:04 crc kubenswrapper[4941]: E1130 09:06:04.522212 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.622962 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0f67deb-a45e-4c98-ae10-793d43722433-config-data\") pod \"b0f67deb-a45e-4c98-ae10-793d43722433\" (UID: \"b0f67deb-a45e-4c98-ae10-793d43722433\") " Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.623190 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kplpx\" (UniqueName: \"kubernetes.io/projected/b0f67deb-a45e-4c98-ae10-793d43722433-kube-api-access-kplpx\") pod \"b0f67deb-a45e-4c98-ae10-793d43722433\" (UID: \"b0f67deb-a45e-4c98-ae10-793d43722433\") " Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.623286 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0f67deb-a45e-4c98-ae10-793d43722433-combined-ca-bundle\") pod \"b0f67deb-a45e-4c98-ae10-793d43722433\" (UID: \"b0f67deb-a45e-4c98-ae10-793d43722433\") " Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.630617 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0f67deb-a45e-4c98-ae10-793d43722433-kube-api-access-kplpx" (OuterVolumeSpecName: "kube-api-access-kplpx") pod "b0f67deb-a45e-4c98-ae10-793d43722433" (UID: "b0f67deb-a45e-4c98-ae10-793d43722433"). InnerVolumeSpecName "kube-api-access-kplpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.667871 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0f67deb-a45e-4c98-ae10-793d43722433-config-data" (OuterVolumeSpecName: "config-data") pod "b0f67deb-a45e-4c98-ae10-793d43722433" (UID: "b0f67deb-a45e-4c98-ae10-793d43722433"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.670529 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0f67deb-a45e-4c98-ae10-793d43722433-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b0f67deb-a45e-4c98-ae10-793d43722433" (UID: "b0f67deb-a45e-4c98-ae10-793d43722433"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.730549 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0f67deb-a45e-4c98-ae10-793d43722433-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.730588 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kplpx\" (UniqueName: \"kubernetes.io/projected/b0f67deb-a45e-4c98-ae10-793d43722433-kube-api-access-kplpx\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.730602 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0f67deb-a45e-4c98-ae10-793d43722433-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.747811 4941 generic.go:334] "Generic (PLEG): container finished" podID="b0f67deb-a45e-4c98-ae10-793d43722433" containerID="60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020" exitCode=0 Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.747994 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b0f67deb-a45e-4c98-ae10-793d43722433","Type":"ContainerDied","Data":"60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020"} Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.748043 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b0f67deb-a45e-4c98-ae10-793d43722433","Type":"ContainerDied","Data":"c030ba71005f520b777d0a4873c316ab1e7b5b117da69d40b7f2bf457345a952"} Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.748076 4941 scope.go:117] "RemoveContainer" containerID="60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020" Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.748365 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.753084 4941 generic.go:334] "Generic (PLEG): container finished" podID="a356bfee-e892-474f-8f09-b179804dced1" containerID="ab9e48f3ad9dce33268571c7b0f0f0dd5019375ec245be8dd96210695e589c24" exitCode=0
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.753206 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a356bfee-e892-474f-8f09-b179804dced1","Type":"ContainerDied","Data":"ab9e48f3ad9dce33268571c7b0f0f0dd5019375ec245be8dd96210695e589c24"}
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.806177 4941 scope.go:117] "RemoveContainer" containerID="60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020"
Nov 30 09:06:04 crc kubenswrapper[4941]: E1130 09:06:04.806959 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020\": container with ID starting with 60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020 not found: ID does not exist" containerID="60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020"
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.807007 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020"} err="failed to get container status \"60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020\": rpc error: code = NotFound desc = could not find container \"60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020\": container with ID starting with 60b403b0e7222a7f1c9788af90f5652783616abcf2af562698efdaa79cc3e020 not found: ID does not exist"
Nov 30 09:06:04 crc kubenswrapper[4941]: E1130 09:06:04.859066 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ab9e48f3ad9dce33268571c7b0f0f0dd5019375ec245be8dd96210695e589c24 is running failed: container process not found" containerID="ab9e48f3ad9dce33268571c7b0f0f0dd5019375ec245be8dd96210695e589c24" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Nov 30 09:06:04 crc kubenswrapper[4941]: E1130 09:06:04.859953 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ab9e48f3ad9dce33268571c7b0f0f0dd5019375ec245be8dd96210695e589c24 is running failed: container process not found" containerID="ab9e48f3ad9dce33268571c7b0f0f0dd5019375ec245be8dd96210695e589c24" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Nov 30 09:06:04 crc kubenswrapper[4941]: E1130 09:06:04.860918 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ab9e48f3ad9dce33268571c7b0f0f0dd5019375ec245be8dd96210695e589c24 is running failed: container process not found" containerID="ab9e48f3ad9dce33268571c7b0f0f0dd5019375ec245be8dd96210695e589c24" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
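The ExecSync failures above are the readiness probe itself: for the nova services it is an exec probe running /usr/bin/pgrep -r DRST against the service process, and once the container is stopping the runtime refuses to register new exec PIDs, so the probe errors until the container is gone. These errors during a graceful shutdown are expected noise rather than a separate fault. A sketch of an equivalent exec probe definition, with the same caveat as earlier: the command is copied from the log's cmd=[...] field, not read from the actual pod spec:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// The probe passes while pgrep finds a nova-scheduler process in a
	// runnable/sleeping (DRST) state inside the container.
	probe := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			Exec: &corev1.ExecAction{
				Command: []string{"/usr/bin/pgrep", "-r", "DRST", "nova-scheduler"},
			},
		},
	}
	fmt.Printf("%+v\n", probe)
}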
Nov 30 09:06:04 crc kubenswrapper[4941]: E1130 09:06:04.860960 4941 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ab9e48f3ad9dce33268571c7b0f0f0dd5019375ec245be8dd96210695e589c24 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="a356bfee-e892-474f-8f09-b179804dced1" containerName="nova-scheduler-scheduler"
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.879816 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.889981 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.905490 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Nov 30 09:06:04 crc kubenswrapper[4941]: E1130 09:06:04.906367 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0f67deb-a45e-4c98-ae10-793d43722433" containerName="nova-cell1-conductor-conductor"
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.906399 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0f67deb-a45e-4c98-ae10-793d43722433" containerName="nova-cell1-conductor-conductor"
Nov 30 09:06:04 crc kubenswrapper[4941]: E1130 09:06:04.906448 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cc0b49d-63a4-4376-87d3-9866a06212e7" containerName="neutron-dhcp-openstack-openstack-cell1"
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.906458 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cc0b49d-63a4-4376-87d3-9866a06212e7" containerName="neutron-dhcp-openstack-openstack-cell1"
Nov 30 09:06:04 crc kubenswrapper[4941]: E1130 09:06:04.906481 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56cf97ff-5196-4975-909b-a838177567ee" containerName="extract-content"
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.906489 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="56cf97ff-5196-4975-909b-a838177567ee" containerName="extract-content"
Nov 30 09:06:04 crc kubenswrapper[4941]: E1130 09:06:04.906505 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56cf97ff-5196-4975-909b-a838177567ee" containerName="extract-utilities"
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.906512 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="56cf97ff-5196-4975-909b-a838177567ee" containerName="extract-utilities"
Nov 30 09:06:04 crc kubenswrapper[4941]: E1130 09:06:04.906529 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56cf97ff-5196-4975-909b-a838177567ee" containerName="registry-server"
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.906538 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="56cf97ff-5196-4975-909b-a838177567ee" containerName="registry-server"
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.906847 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="56cf97ff-5196-4975-909b-a838177567ee" containerName="registry-server"
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.906880 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cc0b49d-63a4-4376-87d3-9866a06212e7" containerName="neutron-dhcp-openstack-openstack-cell1"
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.906903 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0f67deb-a45e-4c98-ae10-793d43722433" containerName="nova-cell1-conductor-conductor"
Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.908609 4941 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.914046 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 30 09:06:04 crc kubenswrapper[4941]: I1130 09:06:04.917124 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.039631 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16a91c01-c6ad-40d3-8f79-9a19235f0964-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"16a91c01-c6ad-40d3-8f79-9a19235f0964\") " pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.040104 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksmdg\" (UniqueName: \"kubernetes.io/projected/16a91c01-c6ad-40d3-8f79-9a19235f0964-kube-api-access-ksmdg\") pod \"nova-cell1-conductor-0\" (UID: \"16a91c01-c6ad-40d3-8f79-9a19235f0964\") " pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.040419 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16a91c01-c6ad-40d3-8f79-9a19235f0964-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"16a91c01-c6ad-40d3-8f79-9a19235f0964\") " pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.048525 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.143150 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npj85\" (UniqueName: \"kubernetes.io/projected/a356bfee-e892-474f-8f09-b179804dced1-kube-api-access-npj85\") pod \"a356bfee-e892-474f-8f09-b179804dced1\" (UID: \"a356bfee-e892-474f-8f09-b179804dced1\") " Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.143466 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a356bfee-e892-474f-8f09-b179804dced1-combined-ca-bundle\") pod \"a356bfee-e892-474f-8f09-b179804dced1\" (UID: \"a356bfee-e892-474f-8f09-b179804dced1\") " Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.143531 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a356bfee-e892-474f-8f09-b179804dced1-config-data\") pod \"a356bfee-e892-474f-8f09-b179804dced1\" (UID: \"a356bfee-e892-474f-8f09-b179804dced1\") " Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.144046 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksmdg\" (UniqueName: \"kubernetes.io/projected/16a91c01-c6ad-40d3-8f79-9a19235f0964-kube-api-access-ksmdg\") pod \"nova-cell1-conductor-0\" (UID: \"16a91c01-c6ad-40d3-8f79-9a19235f0964\") " pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.144347 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16a91c01-c6ad-40d3-8f79-9a19235f0964-config-data\") pod \"nova-cell1-conductor-0\" (UID: 
\"16a91c01-c6ad-40d3-8f79-9a19235f0964\") " pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.144504 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16a91c01-c6ad-40d3-8f79-9a19235f0964-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"16a91c01-c6ad-40d3-8f79-9a19235f0964\") " pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.148644 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a356bfee-e892-474f-8f09-b179804dced1-kube-api-access-npj85" (OuterVolumeSpecName: "kube-api-access-npj85") pod "a356bfee-e892-474f-8f09-b179804dced1" (UID: "a356bfee-e892-474f-8f09-b179804dced1"). InnerVolumeSpecName "kube-api-access-npj85". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.149133 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16a91c01-c6ad-40d3-8f79-9a19235f0964-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"16a91c01-c6ad-40d3-8f79-9a19235f0964\") " pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.151241 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16a91c01-c6ad-40d3-8f79-9a19235f0964-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"16a91c01-c6ad-40d3-8f79-9a19235f0964\") " pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.164068 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksmdg\" (UniqueName: \"kubernetes.io/projected/16a91c01-c6ad-40d3-8f79-9a19235f0964-kube-api-access-ksmdg\") pod \"nova-cell1-conductor-0\" (UID: \"16a91c01-c6ad-40d3-8f79-9a19235f0964\") " pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.184604 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a356bfee-e892-474f-8f09-b179804dced1-config-data" (OuterVolumeSpecName: "config-data") pod "a356bfee-e892-474f-8f09-b179804dced1" (UID: "a356bfee-e892-474f-8f09-b179804dced1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.186686 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a356bfee-e892-474f-8f09-b179804dced1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a356bfee-e892-474f-8f09-b179804dced1" (UID: "a356bfee-e892-474f-8f09-b179804dced1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.243301 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.246641 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npj85\" (UniqueName: \"kubernetes.io/projected/a356bfee-e892-474f-8f09-b179804dced1-kube-api-access-npj85\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.246676 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a356bfee-e892-474f-8f09-b179804dced1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.246689 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a356bfee-e892-474f-8f09-b179804dced1-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.325318 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d7f3b443-db5d-4635-b028-9adf4c846090" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.77:8775/\": read tcp 10.217.0.2:35022->10.217.1.77:8775: read: connection reset by peer" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.325366 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="d7f3b443-db5d-4635-b028-9adf4c846090" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.77:8775/\": read tcp 10.217.0.2:35018->10.217.1.77:8775: read: connection reset by peer" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.574257 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0f67deb-a45e-4c98-ae10-793d43722433" path="/var/lib/kubelet/pods/b0f67deb-a45e-4c98-ae10-793d43722433/volumes" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.779888 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a356bfee-e892-474f-8f09-b179804dced1","Type":"ContainerDied","Data":"7d7d91de554e3c0334b9929fb642df346128dc62d009d5c54cc279fecdf30120"} Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.780156 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.780304 4941 scope.go:117] "RemoveContainer" containerID="ab9e48f3ad9dce33268571c7b0f0f0dd5019375ec245be8dd96210695e589c24" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.791797 4941 generic.go:334] "Generic (PLEG): container finished" podID="d7f3b443-db5d-4635-b028-9adf4c846090" containerID="cfe8c2f296dd4667d96f5ff72fdb4c53478ea9c52589a33990517ef81a7b560a" exitCode=0 Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.791859 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d7f3b443-db5d-4635-b028-9adf4c846090","Type":"ContainerDied","Data":"cfe8c2f296dd4667d96f5ff72fdb4c53478ea9c52589a33990517ef81a7b560a"} Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.810306 4941 generic.go:334] "Generic (PLEG): container finished" podID="cd10ed18-796a-4734-843c-9e2ae2973797" containerID="af07f6fcebb8548eeb4f1bdb32a12056f3c2f9352865b6cbdd49adb22ef4d104" exitCode=0 Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.810400 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cd10ed18-796a-4734-843c-9e2ae2973797","Type":"ContainerDied","Data":"af07f6fcebb8548eeb4f1bdb32a12056f3c2f9352865b6cbdd49adb22ef4d104"} Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.810527 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.821096 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.842176 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.871022 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 09:06:05 crc kubenswrapper[4941]: E1130 09:06:05.871677 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a356bfee-e892-474f-8f09-b179804dced1" containerName="nova-scheduler-scheduler" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.871697 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a356bfee-e892-474f-8f09-b179804dced1" containerName="nova-scheduler-scheduler" Nov 30 09:06:05 crc kubenswrapper[4941]: E1130 09:06:05.871718 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd10ed18-796a-4734-843c-9e2ae2973797" containerName="nova-api-api" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.871727 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd10ed18-796a-4734-843c-9e2ae2973797" containerName="nova-api-api" Nov 30 09:06:05 crc kubenswrapper[4941]: E1130 09:06:05.871779 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd10ed18-796a-4734-843c-9e2ae2973797" containerName="nova-api-log" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.871789 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd10ed18-796a-4734-843c-9e2ae2973797" containerName="nova-api-log" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.872022 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a356bfee-e892-474f-8f09-b179804dced1" containerName="nova-scheduler-scheduler" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.872045 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd10ed18-796a-4734-843c-9e2ae2973797" 
containerName="nova-api-log" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.872064 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd10ed18-796a-4734-843c-9e2ae2973797" containerName="nova-api-api" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.873643 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.878926 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.883065 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.904435 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvprl\" (UniqueName: \"kubernetes.io/projected/cd10ed18-796a-4734-843c-9e2ae2973797-kube-api-access-mvprl\") pod \"cd10ed18-796a-4734-843c-9e2ae2973797\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.904763 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd10ed18-796a-4734-843c-9e2ae2973797-combined-ca-bundle\") pod \"cd10ed18-796a-4734-843c-9e2ae2973797\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.904871 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd10ed18-796a-4734-843c-9e2ae2973797-config-data\") pod \"cd10ed18-796a-4734-843c-9e2ae2973797\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.904921 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd10ed18-796a-4734-843c-9e2ae2973797-logs\") pod \"cd10ed18-796a-4734-843c-9e2ae2973797\" (UID: \"cd10ed18-796a-4734-843c-9e2ae2973797\") " Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.905257 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wvtv\" (UniqueName: \"kubernetes.io/projected/d369d884-79c4-4bd4-b952-0076f23d8e66-kube-api-access-8wvtv\") pod \"nova-scheduler-0\" (UID: \"d369d884-79c4-4bd4-b952-0076f23d8e66\") " pod="openstack/nova-scheduler-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.905487 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d369d884-79c4-4bd4-b952-0076f23d8e66-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d369d884-79c4-4bd4-b952-0076f23d8e66\") " pod="openstack/nova-scheduler-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.905574 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d369d884-79c4-4bd4-b952-0076f23d8e66-config-data\") pod \"nova-scheduler-0\" (UID: \"d369d884-79c4-4bd4-b952-0076f23d8e66\") " pod="openstack/nova-scheduler-0" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.907807 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd10ed18-796a-4734-843c-9e2ae2973797-logs" (OuterVolumeSpecName: "logs") pod 
"cd10ed18-796a-4734-843c-9e2ae2973797" (UID: "cd10ed18-796a-4734-843c-9e2ae2973797"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.916485 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd10ed18-796a-4734-843c-9e2ae2973797-kube-api-access-mvprl" (OuterVolumeSpecName: "kube-api-access-mvprl") pod "cd10ed18-796a-4734-843c-9e2ae2973797" (UID: "cd10ed18-796a-4734-843c-9e2ae2973797"). InnerVolumeSpecName "kube-api-access-mvprl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.961421 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd10ed18-796a-4734-843c-9e2ae2973797-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd10ed18-796a-4734-843c-9e2ae2973797" (UID: "cd10ed18-796a-4734-843c-9e2ae2973797"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:06:05 crc kubenswrapper[4941]: I1130 09:06:05.969076 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd10ed18-796a-4734-843c-9e2ae2973797-config-data" (OuterVolumeSpecName: "config-data") pod "cd10ed18-796a-4734-843c-9e2ae2973797" (UID: "cd10ed18-796a-4734-843c-9e2ae2973797"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.008480 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d369d884-79c4-4bd4-b952-0076f23d8e66-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d369d884-79c4-4bd4-b952-0076f23d8e66\") " pod="openstack/nova-scheduler-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.008579 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d369d884-79c4-4bd4-b952-0076f23d8e66-config-data\") pod \"nova-scheduler-0\" (UID: \"d369d884-79c4-4bd4-b952-0076f23d8e66\") " pod="openstack/nova-scheduler-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.008612 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wvtv\" (UniqueName: \"kubernetes.io/projected/d369d884-79c4-4bd4-b952-0076f23d8e66-kube-api-access-8wvtv\") pod \"nova-scheduler-0\" (UID: \"d369d884-79c4-4bd4-b952-0076f23d8e66\") " pod="openstack/nova-scheduler-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.008701 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd10ed18-796a-4734-843c-9e2ae2973797-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.008716 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd10ed18-796a-4734-843c-9e2ae2973797-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.008725 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd10ed18-796a-4734-843c-9e2ae2973797-logs\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.008734 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvprl\" (UniqueName: 
\"kubernetes.io/projected/cd10ed18-796a-4734-843c-9e2ae2973797-kube-api-access-mvprl\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.013002 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d369d884-79c4-4bd4-b952-0076f23d8e66-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d369d884-79c4-4bd4-b952-0076f23d8e66\") " pod="openstack/nova-scheduler-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.016845 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d369d884-79c4-4bd4-b952-0076f23d8e66-config-data\") pod \"nova-scheduler-0\" (UID: \"d369d884-79c4-4bd4-b952-0076f23d8e66\") " pod="openstack/nova-scheduler-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.027310 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wvtv\" (UniqueName: \"kubernetes.io/projected/d369d884-79c4-4bd4-b952-0076f23d8e66-kube-api-access-8wvtv\") pod \"nova-scheduler-0\" (UID: \"d369d884-79c4-4bd4-b952-0076f23d8e66\") " pod="openstack/nova-scheduler-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.068926 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.196592 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.336183 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.418304 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlb6v\" (UniqueName: \"kubernetes.io/projected/d7f3b443-db5d-4635-b028-9adf4c846090-kube-api-access-nlb6v\") pod \"d7f3b443-db5d-4635-b028-9adf4c846090\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.418593 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7f3b443-db5d-4635-b028-9adf4c846090-combined-ca-bundle\") pod \"d7f3b443-db5d-4635-b028-9adf4c846090\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.418640 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7f3b443-db5d-4635-b028-9adf4c846090-config-data\") pod \"d7f3b443-db5d-4635-b028-9adf4c846090\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.418831 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7f3b443-db5d-4635-b028-9adf4c846090-logs\") pod \"d7f3b443-db5d-4635-b028-9adf4c846090\" (UID: \"d7f3b443-db5d-4635-b028-9adf4c846090\") " Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.419762 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7f3b443-db5d-4635-b028-9adf4c846090-logs" (OuterVolumeSpecName: "logs") pod "d7f3b443-db5d-4635-b028-9adf4c846090" (UID: "d7f3b443-db5d-4635-b028-9adf4c846090"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.449543 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7f3b443-db5d-4635-b028-9adf4c846090-kube-api-access-nlb6v" (OuterVolumeSpecName: "kube-api-access-nlb6v") pod "d7f3b443-db5d-4635-b028-9adf4c846090" (UID: "d7f3b443-db5d-4635-b028-9adf4c846090"). InnerVolumeSpecName "kube-api-access-nlb6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.460135 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7f3b443-db5d-4635-b028-9adf4c846090-config-data" (OuterVolumeSpecName: "config-data") pod "d7f3b443-db5d-4635-b028-9adf4c846090" (UID: "d7f3b443-db5d-4635-b028-9adf4c846090"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.488286 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7f3b443-db5d-4635-b028-9adf4c846090-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7f3b443-db5d-4635-b028-9adf4c846090" (UID: "d7f3b443-db5d-4635-b028-9adf4c846090"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.521640 4941 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7f3b443-db5d-4635-b028-9adf4c846090-logs\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.521678 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlb6v\" (UniqueName: \"kubernetes.io/projected/d7f3b443-db5d-4635-b028-9adf4c846090-kube-api-access-nlb6v\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.521693 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7f3b443-db5d-4635-b028-9adf4c846090-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.521703 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7f3b443-db5d-4635-b028-9adf4c846090-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:06 crc kubenswrapper[4941]: E1130 09:06:06.628286 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942 is running failed: container process not found" containerID="da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 30 09:06:06 crc kubenswrapper[4941]: E1130 09:06:06.629717 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942 is running failed: container process not found" containerID="da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 30 09:06:06 crc kubenswrapper[4941]: E1130 09:06:06.630643 4941 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is 
not created or running: checking if PID of da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942 is running failed: container process not found" containerID="da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 30 09:06:06 crc kubenswrapper[4941]: E1130 09:06:06.630731 4941 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6" containerName="nova-cell0-conductor-conductor" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.759630 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.806097 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.844165 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d369d884-79c4-4bd4-b952-0076f23d8e66","Type":"ContainerStarted","Data":"d4f277b5eca063a5d12e596df75db98f4d4d757277bb9d9ef632c7c0d1903937"} Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.847437 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d7f3b443-db5d-4635-b028-9adf4c846090","Type":"ContainerDied","Data":"3451bd08021b0805f87341f2183b981750a4bc4a5a0d6cc1a85728c48a189ce4"} Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.847499 4941 scope.go:117] "RemoveContainer" containerID="cfe8c2f296dd4667d96f5ff72fdb4c53478ea9c52589a33990517ef81a7b560a" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.847650 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.852980 4941 generic.go:334] "Generic (PLEG): container finished" podID="6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6" containerID="da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942" exitCode=0 Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.853114 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6","Type":"ContainerDied","Data":"da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942"} Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.853155 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6","Type":"ContainerDied","Data":"1a2c24acd376672a052fbc3e3d6fb001c932ae580492235f12f1fc24ebee5645"} Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.853248 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.858829 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"16a91c01-c6ad-40d3-8f79-9a19235f0964","Type":"ContainerStarted","Data":"91eefa79c09c17a10854a83621d7f85fa14322de74c7bb6420b97020da8d0204"} Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.858861 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"16a91c01-c6ad-40d3-8f79-9a19235f0964","Type":"ContainerStarted","Data":"bfb755102c8d61a0b4fec4036dbe8660419af6aa343461efbc5e815a76c676fc"} Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.859002 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.864034 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cd10ed18-796a-4734-843c-9e2ae2973797","Type":"ContainerDied","Data":"1a45fc4c0967f6969afde9445bd72ce8d514e93a181cdf2237b77f58c477a6d8"} Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.864195 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.888905 4941 scope.go:117] "RemoveContainer" containerID="403dcee0e54f75fc1e836211d7158dfd0dc692b8fbe61a00b31f70ec6f2f3c4d" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.913981 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.913932926 podStartE2EDuration="2.913932926s" podCreationTimestamp="2025-11-30 09:06:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 09:06:06.884058592 +0000 UTC m=+8387.652230211" watchObservedRunningTime="2025-11-30 09:06:06.913932926 +0000 UTC m=+8387.682104535" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.937962 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-config-data\") pod \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\" (UID: \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\") " Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.938164 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-combined-ca-bundle\") pod \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\" (UID: \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\") " Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.938767 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr5jq\" (UniqueName: \"kubernetes.io/projected/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-kube-api-access-kr5jq\") pod \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\" (UID: \"6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6\") " Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.961402 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.984078 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.985894 4941 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/projected/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-kube-api-access-kr5jq" (OuterVolumeSpecName: "kube-api-access-kr5jq") pod "6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6" (UID: "6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6"). InnerVolumeSpecName "kube-api-access-kr5jq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:06:06 crc kubenswrapper[4941]: I1130 09:06:06.986315 4941 scope.go:117] "RemoveContainer" containerID="da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.036071 4941 scope.go:117] "RemoveContainer" containerID="da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942" Nov 30 09:06:07 crc kubenswrapper[4941]: E1130 09:06:07.040749 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942\": container with ID starting with da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942 not found: ID does not exist" containerID="da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.040786 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942"} err="failed to get container status \"da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942\": rpc error: code = NotFound desc = could not find container \"da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942\": container with ID starting with da1528b632dcbf0c75609a152d60e535681e965f36278df6b6a4f1f40da5c942 not found: ID does not exist" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.040813 4941 scope.go:117] "RemoveContainer" containerID="af07f6fcebb8548eeb4f1bdb32a12056f3c2f9352865b6cbdd49adb22ef4d104" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.043562 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr5jq\" (UniqueName: \"kubernetes.io/projected/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-kube-api-access-kr5jq\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.054774 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 30 09:06:07 crc kubenswrapper[4941]: E1130 09:06:07.055511 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f3b443-db5d-4635-b028-9adf4c846090" containerName="nova-metadata-metadata" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.055531 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f3b443-db5d-4635-b028-9adf4c846090" containerName="nova-metadata-metadata" Nov 30 09:06:07 crc kubenswrapper[4941]: E1130 09:06:07.055555 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6" containerName="nova-cell0-conductor-conductor" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.055564 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6" containerName="nova-cell0-conductor-conductor" Nov 30 09:06:07 crc kubenswrapper[4941]: E1130 09:06:07.055591 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f3b443-db5d-4635-b028-9adf4c846090" containerName="nova-metadata-log" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.055597 4941 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="d7f3b443-db5d-4635-b028-9adf4c846090" containerName="nova-metadata-log" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.055921 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7f3b443-db5d-4635-b028-9adf4c846090" containerName="nova-metadata-metadata" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.055959 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6" containerName="nova-cell0-conductor-conductor" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.055976 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7f3b443-db5d-4635-b028-9adf4c846090" containerName="nova-metadata-log" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.058131 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.058193 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-config-data" (OuterVolumeSpecName: "config-data") pod "6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6" (UID: "6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.061596 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.067353 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6" (UID: "6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.081345 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.103397 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.116417 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.130531 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.134010 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.137290 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.143379 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.163449 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bb713a-b0ae-4bed-82ce-f1fa4de9281e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e\") " pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.163546 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7bb713a-b0ae-4bed-82ce-f1fa4de9281e-logs\") pod \"nova-api-0\" (UID: \"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e\") " pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.163747 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7bb713a-b0ae-4bed-82ce-f1fa4de9281e-config-data\") pod \"nova-api-0\" (UID: \"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e\") " pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.164072 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxtg2\" (UniqueName: \"kubernetes.io/projected/d7bb713a-b0ae-4bed-82ce-f1fa4de9281e-kube-api-access-nxtg2\") pod \"nova-api-0\" (UID: \"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e\") " pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.164220 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-config-data\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.164259 4941 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.193919 4941 scope.go:117] "RemoveContainer" containerID="db663f0cce711c93efe883f85529d4ed3ff32c34a52b403b693e93a0a5794ae1" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.223475 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.231076 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.246021 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.248101 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.254995 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.255245 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.268551 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7bb713a-b0ae-4bed-82ce-f1fa4de9281e-logs\") pod \"nova-api-0\" (UID: \"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e\") " pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.269071 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fe20d7b-d57d-4291-b62a-a7d0e417ebdd-logs\") pod \"nova-metadata-0\" (UID: \"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd\") " pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.268882 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7bb713a-b0ae-4bed-82ce-f1fa4de9281e-logs\") pod \"nova-api-0\" (UID: \"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e\") " pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.269124 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7bb713a-b0ae-4bed-82ce-f1fa4de9281e-config-data\") pod \"nova-api-0\" (UID: \"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e\") " pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.269232 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxtg2\" (UniqueName: \"kubernetes.io/projected/d7bb713a-b0ae-4bed-82ce-f1fa4de9281e-kube-api-access-nxtg2\") pod \"nova-api-0\" (UID: \"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e\") " pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.269267 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cpdr\" (UniqueName: \"kubernetes.io/projected/7fe20d7b-d57d-4291-b62a-a7d0e417ebdd-kube-api-access-6cpdr\") pod \"nova-metadata-0\" (UID: \"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd\") " pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.269393 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5606214f-0456-4a22-87dd-a3a6624afaf5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"5606214f-0456-4a22-87dd-a3a6624afaf5\") " pod="openstack/nova-cell0-conductor-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.269441 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jll7\" (UniqueName: \"kubernetes.io/projected/5606214f-0456-4a22-87dd-a3a6624afaf5-kube-api-access-9jll7\") pod \"nova-cell0-conductor-0\" (UID: \"5606214f-0456-4a22-87dd-a3a6624afaf5\") " pod="openstack/nova-cell0-conductor-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.269475 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7fe20d7b-d57d-4291-b62a-a7d0e417ebdd-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd\") " pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.269544 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bb713a-b0ae-4bed-82ce-f1fa4de9281e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e\") " pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.269574 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fe20d7b-d57d-4291-b62a-a7d0e417ebdd-config-data\") pod \"nova-metadata-0\" (UID: \"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd\") " pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.269594 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5606214f-0456-4a22-87dd-a3a6624afaf5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"5606214f-0456-4a22-87dd-a3a6624afaf5\") " pod="openstack/nova-cell0-conductor-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.280171 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7bb713a-b0ae-4bed-82ce-f1fa4de9281e-config-data\") pod \"nova-api-0\" (UID: \"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e\") " pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.286584 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bb713a-b0ae-4bed-82ce-f1fa4de9281e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e\") " pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.290217 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxtg2\" (UniqueName: \"kubernetes.io/projected/d7bb713a-b0ae-4bed-82ce-f1fa4de9281e-kube-api-access-nxtg2\") pod \"nova-api-0\" (UID: \"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e\") " pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.371855 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5606214f-0456-4a22-87dd-a3a6624afaf5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"5606214f-0456-4a22-87dd-a3a6624afaf5\") " pod="openstack/nova-cell0-conductor-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.371937 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jll7\" (UniqueName: \"kubernetes.io/projected/5606214f-0456-4a22-87dd-a3a6624afaf5-kube-api-access-9jll7\") pod \"nova-cell0-conductor-0\" (UID: \"5606214f-0456-4a22-87dd-a3a6624afaf5\") " pod="openstack/nova-cell0-conductor-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.371976 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fe20d7b-d57d-4291-b62a-a7d0e417ebdd-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd\") " pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.372046 
4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fe20d7b-d57d-4291-b62a-a7d0e417ebdd-config-data\") pod \"nova-metadata-0\" (UID: \"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd\") " pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.372083 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5606214f-0456-4a22-87dd-a3a6624afaf5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"5606214f-0456-4a22-87dd-a3a6624afaf5\") " pod="openstack/nova-cell0-conductor-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.372123 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fe20d7b-d57d-4291-b62a-a7d0e417ebdd-logs\") pod \"nova-metadata-0\" (UID: \"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd\") " pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.372201 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cpdr\" (UniqueName: \"kubernetes.io/projected/7fe20d7b-d57d-4291-b62a-a7d0e417ebdd-kube-api-access-6cpdr\") pod \"nova-metadata-0\" (UID: \"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd\") " pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.373093 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fe20d7b-d57d-4291-b62a-a7d0e417ebdd-logs\") pod \"nova-metadata-0\" (UID: \"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd\") " pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.376983 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5606214f-0456-4a22-87dd-a3a6624afaf5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"5606214f-0456-4a22-87dd-a3a6624afaf5\") " pod="openstack/nova-cell0-conductor-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.379712 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fe20d7b-d57d-4291-b62a-a7d0e417ebdd-config-data\") pod \"nova-metadata-0\" (UID: \"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd\") " pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.381118 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fe20d7b-d57d-4291-b62a-a7d0e417ebdd-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd\") " pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.381799 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5606214f-0456-4a22-87dd-a3a6624afaf5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"5606214f-0456-4a22-87dd-a3a6624afaf5\") " pod="openstack/nova-cell0-conductor-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.395779 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cpdr\" (UniqueName: \"kubernetes.io/projected/7fe20d7b-d57d-4291-b62a-a7d0e417ebdd-kube-api-access-6cpdr\") pod \"nova-metadata-0\" (UID: \"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd\") " pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc 
kubenswrapper[4941]: I1130 09:06:07.403634 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jll7\" (UniqueName: \"kubernetes.io/projected/5606214f-0456-4a22-87dd-a3a6624afaf5-kube-api-access-9jll7\") pod \"nova-cell0-conductor-0\" (UID: \"5606214f-0456-4a22-87dd-a3a6624afaf5\") " pod="openstack/nova-cell0-conductor-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.484446 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.496169 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.537999 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6" path="/var/lib/kubelet/pods/6b3e4a1c-882b-4f48-a882-4dc8fe7eabf6/volumes" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.538672 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a356bfee-e892-474f-8f09-b179804dced1" path="/var/lib/kubelet/pods/a356bfee-e892-474f-8f09-b179804dced1/volumes" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.539249 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd10ed18-796a-4734-843c-9e2ae2973797" path="/var/lib/kubelet/pods/cd10ed18-796a-4734-843c-9e2ae2973797/volumes" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.540788 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7f3b443-db5d-4635-b028-9adf4c846090" path="/var/lib/kubelet/pods/d7f3b443-db5d-4635-b028-9adf4c846090/volumes" Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.570317 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.886168 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d369d884-79c4-4bd4-b952-0076f23d8e66","Type":"ContainerStarted","Data":"a7d8517ed76ce43bed5c7a606908f6a1b7b8dce1679a8085f487500ab80d4317"}
Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.907469 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.907440255 podStartE2EDuration="2.907440255s" podCreationTimestamp="2025-11-30 09:06:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 09:06:07.904120052 +0000 UTC m=+8388.672291661" watchObservedRunningTime="2025-11-30 09:06:07.907440255 +0000 UTC m=+8388.675611864"
Nov 30 09:06:07 crc kubenswrapper[4941]: I1130 09:06:07.991042 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.030715 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 30 09:06:08 crc kubenswrapper[4941]: W1130 09:06:08.036066 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7bb713a_b0ae_4bed_82ce_f1fa4de9281e.slice/crio-1abc09f52a6310bcd8ac71335d0f71e2bd5ce6862f6b74bb49014f293954995e WatchSource:0}: Error finding container 1abc09f52a6310bcd8ac71335d0f71e2bd5ce6862f6b74bb49014f293954995e: Status 404 returned error can't find the container with id 1abc09f52a6310bcd8ac71335d0f71e2bd5ce6862f6b74bb49014f293954995e
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.116784 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.899210 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd","Type":"ContainerStarted","Data":"ad8ddfe0c59b4f06d43447bc76d7c6129f1ca613c44e8e6743230b8ed22df54b"}
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.899604 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd","Type":"ContainerStarted","Data":"47cdd5902a8a4f1f145bbcb7ea9e11b27195446804f62e0d3ceb7647eb48bb07"}
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.899617 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7fe20d7b-d57d-4291-b62a-a7d0e417ebdd","Type":"ContainerStarted","Data":"160183cc1f5c4901d7557d017ea8f930ac7631f3d77e619ecb97902509ba2b06"}
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.901222 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e","Type":"ContainerStarted","Data":"9fb02194be18d0df761491055f83479a5da9dfeae5b1e91a8172593e22985f81"}
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.901269 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e","Type":"ContainerStarted","Data":"2a8afc79109f26693646b4be8e1b7ea57ee01df5112f6a6310e8a3665a9b50dd"}
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.901287 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d7bb713a-b0ae-4bed-82ce-f1fa4de9281e","Type":"ContainerStarted","Data":"1abc09f52a6310bcd8ac71335d0f71e2bd5ce6862f6b74bb49014f293954995e"}
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.904251 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"5606214f-0456-4a22-87dd-a3a6624afaf5","Type":"ContainerStarted","Data":"87d0c030fa9409481d26d0b0d256511e59eb03b46ff589d7937afe0b074f768d"}
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.904307 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"5606214f-0456-4a22-87dd-a3a6624afaf5","Type":"ContainerStarted","Data":"c3befc942133a35e658cda48b6e1c8d0dfffbe0ba0e5c6c7e4442ede0fbdcae6"}
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.922169 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.922152518 podStartE2EDuration="2.922152518s" podCreationTimestamp="2025-11-30 09:06:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 09:06:08.918444112 +0000 UTC m=+8389.686615721" watchObservedRunningTime="2025-11-30 09:06:08.922152518 +0000 UTC m=+8389.690324127"
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.955010 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.9549838830000001 podStartE2EDuration="1.954983883s" podCreationTimestamp="2025-11-30 09:06:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 09:06:08.940609229 +0000 UTC m=+8389.708780828" watchObservedRunningTime="2025-11-30 09:06:08.954983883 +0000 UTC m=+8389.723155492"
Nov 30 09:06:08 crc kubenswrapper[4941]: I1130 09:06:08.989943 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.989918025 podStartE2EDuration="2.989918025s" podCreationTimestamp="2025-11-30 09:06:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-30 09:06:08.979321217 +0000 UTC m=+8389.747492826" watchObservedRunningTime="2025-11-30 09:06:08.989918025 +0000 UTC m=+8389.758089634"
Nov 30 09:06:09 crc kubenswrapper[4941]: I1130 09:06:09.914228 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Nov 30 09:06:11 crc kubenswrapper[4941]: I1130 09:06:11.197429 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Nov 30 09:06:12 crc kubenswrapper[4941]: I1130 09:06:12.484667 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Nov 30 09:06:12 crc kubenswrapper[4941]: I1130 09:06:12.485178 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Nov 30 09:06:15 crc kubenswrapper[4941]: I1130 09:06:15.304797 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Nov 30 09:06:15 crc kubenswrapper[4941]: I1130 09:06:15.522190 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:06:15 crc kubenswrapper[4941]: E1130 09:06:15.522518 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:06:16 crc kubenswrapper[4941]: I1130 09:06:16.197220 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Nov 30 09:06:16 crc kubenswrapper[4941]: I1130 09:06:16.250082 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Nov 30 09:06:17 crc kubenswrapper[4941]: I1130 09:06:17.066897 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Nov 30 09:06:17 crc kubenswrapper[4941]: I1130 09:06:17.484636 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Nov 30 09:06:17 crc kubenswrapper[4941]: I1130 09:06:17.484706 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Nov 30 09:06:17 crc kubenswrapper[4941]: I1130 09:06:17.496443 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Nov 30 09:06:17 crc kubenswrapper[4941]: I1130 09:06:17.496635 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Nov 30 09:06:17 crc kubenswrapper[4941]: I1130 09:06:17.601903 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Nov 30 09:06:18 crc kubenswrapper[4941]: I1130 09:06:18.649902 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d7bb713a-b0ae-4bed-82ce-f1fa4de9281e" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.170:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Nov 30 09:06:18 crc kubenswrapper[4941]: I1130 09:06:18.649928 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7fe20d7b-d57d-4291-b62a-a7d0e417ebdd" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.169:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Nov 30 09:06:18 crc kubenswrapper[4941]: I1130 09:06:18.649970 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7fe20d7b-d57d-4291-b62a-a7d0e417ebdd" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.169:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Nov 30 09:06:18 crc kubenswrapper[4941]: I1130 09:06:18.649990 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d7bb713a-b0ae-4bed-82ce-f1fa4de9281e" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.170:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Nov 30 09:06:27 crc kubenswrapper[4941]: I1130 09:06:27.487553 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Nov 30 09:06:27 crc kubenswrapper[4941]: I1130 09:06:27.489735 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 30 09:06:27 crc kubenswrapper[4941]: I1130 09:06:27.491934 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 30 09:06:27 crc kubenswrapper[4941]: I1130 09:06:27.502305 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 30 09:06:27 crc kubenswrapper[4941]: I1130 09:06:27.504383 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 30 09:06:27 crc kubenswrapper[4941]: I1130 09:06:27.504659 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 30 09:06:27 crc kubenswrapper[4941]: I1130 09:06:27.544642 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 30 09:06:28 crc kubenswrapper[4941]: I1130 09:06:28.166415 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 30 09:06:28 crc kubenswrapper[4941]: I1130 09:06:28.170591 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 30 09:06:28 crc kubenswrapper[4941]: I1130 09:06:28.171089 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.396250 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf"] Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.399176 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.404372 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.404562 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-mkbt6" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.404576 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.404608 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.404675 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.405006 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.405121 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.409251 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf"] Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.487096 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cells-global-config-0\") pod 
\"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.487182 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.487238 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.487272 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxncn\" (UniqueName: \"kubernetes.io/projected/db6bcef0-5db9-4007-a09c-1435e4d37b48-kube-api-access-kxncn\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.487307 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.487352 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.487385 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.487453 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: 
\"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.487475 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.487501 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.487536 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.590089 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.590222 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.590260 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxncn\" (UniqueName: \"kubernetes.io/projected/db6bcef0-5db9-4007-a09c-1435e4d37b48-kube-api-access-kxncn\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.590309 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.590356 4941 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.590402 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.590574 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.590603 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.590645 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.590741 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.590840 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.591380 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: 
\"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.597308 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.599063 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.600050 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.600503 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.601124 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.601186 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.601428 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.601634 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.603854 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.615689 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxncn\" (UniqueName: \"kubernetes.io/projected/db6bcef0-5db9-4007-a09c-1435e4d37b48-kube-api-access-kxncn\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:29 crc kubenswrapper[4941]: I1130 09:06:29.731149 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:06:30 crc kubenswrapper[4941]: I1130 09:06:30.360362 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf"] Nov 30 09:06:30 crc kubenswrapper[4941]: W1130 09:06:30.366203 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb6bcef0_5db9_4007_a09c_1435e4d37b48.slice/crio-2dc459a1294c84c59c787ce54c9c6694a85d194dc84af4c9fa13b0db71a09a2e WatchSource:0}: Error finding container 2dc459a1294c84c59c787ce54c9c6694a85d194dc84af4c9fa13b0db71a09a2e: Status 404 returned error can't find the container with id 2dc459a1294c84c59c787ce54c9c6694a85d194dc84af4c9fa13b0db71a09a2e Nov 30 09:06:30 crc kubenswrapper[4941]: I1130 09:06:30.522463 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162" Nov 30 09:06:30 crc kubenswrapper[4941]: E1130 09:06:30.522820 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:06:31 crc kubenswrapper[4941]: I1130 09:06:31.202228 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" event={"ID":"db6bcef0-5db9-4007-a09c-1435e4d37b48","Type":"ContainerStarted","Data":"2dc459a1294c84c59c787ce54c9c6694a85d194dc84af4c9fa13b0db71a09a2e"} Nov 30 09:06:32 crc kubenswrapper[4941]: I1130 09:06:32.219988 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" event={"ID":"db6bcef0-5db9-4007-a09c-1435e4d37b48","Type":"ContainerStarted","Data":"7f59994dd6e1603d4cc3c24d9dd5613d6f5ac9717da703ef83c0fa27fc60bc47"} Nov 30 09:06:32 crc kubenswrapper[4941]: I1130 
Nov 30 09:06:45 crc kubenswrapper[4941]: I1130 09:06:45.523053 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:06:45 crc kubenswrapper[4941]: E1130 09:06:45.524391 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:07:00 crc kubenswrapper[4941]: I1130 09:07:00.522700 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:07:00 crc kubenswrapper[4941]: E1130 09:07:00.523980 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:07:11 crc kubenswrapper[4941]: I1130 09:07:11.522313 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:07:11 crc kubenswrapper[4941]: E1130 09:07:11.523661 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:07:22 crc kubenswrapper[4941]: I1130 09:07:22.523246 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:07:22 crc kubenswrapper[4941]: E1130 09:07:22.524456 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:07:33 crc kubenswrapper[4941]: I1130 09:07:33.522569 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162"
Nov 30 09:07:35 crc kubenswrapper[4941]: I1130 09:07:35.030982 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"2e6f1e45c1cce03f2da53576726ab95a81ab52ffdf8333da897df5d8ea62ad43"}
Nov 30 09:10:02 crc kubenswrapper[4941]: I1130 09:10:02.979255 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 09:10:02 crc kubenswrapper[4941]: I1130 09:10:02.980146 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 09:10:18 crc kubenswrapper[4941]: I1130 09:10:18.975995 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jmxqw"]
Nov 30 09:10:18 crc kubenswrapper[4941]: I1130 09:10:18.979740 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jmxqw"
Nov 30 09:10:18 crc kubenswrapper[4941]: I1130 09:10:18.988409 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jmxqw"]
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.066205 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/149c074f-3e86-4f07-b79f-dfb34544be80-utilities\") pod \"redhat-operators-jmxqw\" (UID: \"149c074f-3e86-4f07-b79f-dfb34544be80\") " pod="openshift-marketplace/redhat-operators-jmxqw"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.066465 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/149c074f-3e86-4f07-b79f-dfb34544be80-catalog-content\") pod \"redhat-operators-jmxqw\" (UID: \"149c074f-3e86-4f07-b79f-dfb34544be80\") " pod="openshift-marketplace/redhat-operators-jmxqw"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.066526 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pqn2\" (UniqueName: \"kubernetes.io/projected/149c074f-3e86-4f07-b79f-dfb34544be80-kube-api-access-6pqn2\") pod \"redhat-operators-jmxqw\" (UID: \"149c074f-3e86-4f07-b79f-dfb34544be80\") " pod="openshift-marketplace/redhat-operators-jmxqw"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.168960 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7ln2g"]
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.169089 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/149c074f-3e86-4f07-b79f-dfb34544be80-utilities\") pod \"redhat-operators-jmxqw\" (UID: \"149c074f-3e86-4f07-b79f-dfb34544be80\") " pod="openshift-marketplace/redhat-operators-jmxqw"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.169187 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/149c074f-3e86-4f07-b79f-dfb34544be80-catalog-content\") pod \"redhat-operators-jmxqw\" (UID: \"149c074f-3e86-4f07-b79f-dfb34544be80\") " pod="openshift-marketplace/redhat-operators-jmxqw"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.169224 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pqn2\" (UniqueName: \"kubernetes.io/projected/149c074f-3e86-4f07-b79f-dfb34544be80-kube-api-access-6pqn2\") pod \"redhat-operators-jmxqw\" (UID: \"149c074f-3e86-4f07-b79f-dfb34544be80\") " pod="openshift-marketplace/redhat-operators-jmxqw"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.170258 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/149c074f-3e86-4f07-b79f-dfb34544be80-utilities\") pod \"redhat-operators-jmxqw\" (UID: \"149c074f-3e86-4f07-b79f-dfb34544be80\") " pod="openshift-marketplace/redhat-operators-jmxqw"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.170360 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/149c074f-3e86-4f07-b79f-dfb34544be80-catalog-content\") pod \"redhat-operators-jmxqw\" (UID: \"149c074f-3e86-4f07-b79f-dfb34544be80\") " pod="openshift-marketplace/redhat-operators-jmxqw"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.171974 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7ln2g"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.181771 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ln2g"]
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.204695 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pqn2\" (UniqueName: \"kubernetes.io/projected/149c074f-3e86-4f07-b79f-dfb34544be80-kube-api-access-6pqn2\") pod \"redhat-operators-jmxqw\" (UID: \"149c074f-3e86-4f07-b79f-dfb34544be80\") " pod="openshift-marketplace/redhat-operators-jmxqw"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.271922 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fac33ee4-bea8-4039-a127-1195835ab603-catalog-content\") pod \"redhat-marketplace-7ln2g\" (UID: \"fac33ee4-bea8-4039-a127-1195835ab603\") " pod="openshift-marketplace/redhat-marketplace-7ln2g"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.271994 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7g4k\" (UniqueName: \"kubernetes.io/projected/fac33ee4-bea8-4039-a127-1195835ab603-kube-api-access-l7g4k\") pod \"redhat-marketplace-7ln2g\" (UID: \"fac33ee4-bea8-4039-a127-1195835ab603\") " pod="openshift-marketplace/redhat-marketplace-7ln2g"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.272175 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fac33ee4-bea8-4039-a127-1195835ab603-utilities\") pod \"redhat-marketplace-7ln2g\" (UID: \"fac33ee4-bea8-4039-a127-1195835ab603\") " pod="openshift-marketplace/redhat-marketplace-7ln2g"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.315860 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jmxqw"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.375550 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fac33ee4-bea8-4039-a127-1195835ab603-catalog-content\") pod \"redhat-marketplace-7ln2g\" (UID: \"fac33ee4-bea8-4039-a127-1195835ab603\") " pod="openshift-marketplace/redhat-marketplace-7ln2g"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.376189 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7g4k\" (UniqueName: \"kubernetes.io/projected/fac33ee4-bea8-4039-a127-1195835ab603-kube-api-access-l7g4k\") pod \"redhat-marketplace-7ln2g\" (UID: \"fac33ee4-bea8-4039-a127-1195835ab603\") " pod="openshift-marketplace/redhat-marketplace-7ln2g"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.376261 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fac33ee4-bea8-4039-a127-1195835ab603-utilities\") pod \"redhat-marketplace-7ln2g\" (UID: \"fac33ee4-bea8-4039-a127-1195835ab603\") " pod="openshift-marketplace/redhat-marketplace-7ln2g"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.376290 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fac33ee4-bea8-4039-a127-1195835ab603-catalog-content\") pod \"redhat-marketplace-7ln2g\" (UID: \"fac33ee4-bea8-4039-a127-1195835ab603\") " pod="openshift-marketplace/redhat-marketplace-7ln2g"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.376787 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fac33ee4-bea8-4039-a127-1195835ab603-utilities\") pod \"redhat-marketplace-7ln2g\" (UID: \"fac33ee4-bea8-4039-a127-1195835ab603\") " pod="openshift-marketplace/redhat-marketplace-7ln2g"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.403978 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7g4k\" (UniqueName: \"kubernetes.io/projected/fac33ee4-bea8-4039-a127-1195835ab603-kube-api-access-l7g4k\") pod \"redhat-marketplace-7ln2g\" (UID: \"fac33ee4-bea8-4039-a127-1195835ab603\") " pod="openshift-marketplace/redhat-marketplace-7ln2g"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.490787 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7ln2g"
Nov 30 09:10:19 crc kubenswrapper[4941]: I1130 09:10:19.980798 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jmxqw"]
Nov 30 09:10:20 crc kubenswrapper[4941]: W1130 09:10:20.100392 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfac33ee4_bea8_4039_a127_1195835ab603.slice/crio-78ff1233020ef613af2243360b845c18b15231131ed248f907971bcc73a7a380 WatchSource:0}: Error finding container 78ff1233020ef613af2243360b845c18b15231131ed248f907971bcc73a7a380: Status 404 returned error can't find the container with id 78ff1233020ef613af2243360b845c18b15231131ed248f907971bcc73a7a380
Nov 30 09:10:20 crc kubenswrapper[4941]: I1130 09:10:20.103588 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ln2g"]
Nov 30 09:10:20 crc kubenswrapper[4941]: I1130 09:10:20.303394 4941 generic.go:334] "Generic (PLEG): container finished" podID="149c074f-3e86-4f07-b79f-dfb34544be80" containerID="e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c" exitCode=0
Nov 30 09:10:20 crc kubenswrapper[4941]: I1130 09:10:20.303838 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmxqw" event={"ID":"149c074f-3e86-4f07-b79f-dfb34544be80","Type":"ContainerDied","Data":"e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c"}
Nov 30 09:10:20 crc kubenswrapper[4941]: I1130 09:10:20.303878 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmxqw" event={"ID":"149c074f-3e86-4f07-b79f-dfb34544be80","Type":"ContainerStarted","Data":"5b411470efba313fcbb5bfdb9f5652c7aa81d87874b82a8b68e149f2e12d474b"}
Nov 30 09:10:20 crc kubenswrapper[4941]: I1130 09:10:20.305008 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ln2g" event={"ID":"fac33ee4-bea8-4039-a127-1195835ab603","Type":"ContainerStarted","Data":"78ff1233020ef613af2243360b845c18b15231131ed248f907971bcc73a7a380"}
Nov 30 09:10:20 crc kubenswrapper[4941]: I1130 09:10:20.307861 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 30 09:10:20 crc kubenswrapper[4941]: E1130 09:10:20.380161 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod149c074f_3e86_4f07_b79f_dfb34544be80.slice/crio-conmon-e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod149c074f_3e86_4f07_b79f_dfb34544be80.slice/crio-e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c.scope\": RecentStats: unable to find data in memory cache]"
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.342688 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmxqw" event={"ID":"149c074f-3e86-4f07-b79f-dfb34544be80","Type":"ContainerStarted","Data":"c7bac2b6884047fa132d1d6d90e1a6bd9292b488bdf30c6300166908eccb7138"}
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.351688 4941 generic.go:334] "Generic (PLEG): container finished" podID="fac33ee4-bea8-4039-a127-1195835ab603" containerID="7dc63ac7e96b0de46c77c523c4fa1379d6728c65c2171ad3f5470a1980c780b0" exitCode=0
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.351766 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ln2g" event={"ID":"fac33ee4-bea8-4039-a127-1195835ab603","Type":"ContainerDied","Data":"7dc63ac7e96b0de46c77c523c4fa1379d6728c65c2171ad3f5470a1980c780b0"}
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.391614 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7d8zx"]
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.395570 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7d8zx"
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.409568 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7d8zx"]
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.457907 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdn92\" (UniqueName: \"kubernetes.io/projected/c8affef7-509a-460a-bb56-7957105c9c1b-kube-api-access-wdn92\") pod \"certified-operators-7d8zx\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " pod="openshift-marketplace/certified-operators-7d8zx"
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.458036 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-catalog-content\") pod \"certified-operators-7d8zx\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " pod="openshift-marketplace/certified-operators-7d8zx"
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.458077 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-utilities\") pod \"certified-operators-7d8zx\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " pod="openshift-marketplace/certified-operators-7d8zx"
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.560757 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-catalog-content\") pod \"certified-operators-7d8zx\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " pod="openshift-marketplace/certified-operators-7d8zx"
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.560818 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-utilities\") pod \"certified-operators-7d8zx\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " pod="openshift-marketplace/certified-operators-7d8zx"
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.560961 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdn92\" (UniqueName: \"kubernetes.io/projected/c8affef7-509a-460a-bb56-7957105c9c1b-kube-api-access-wdn92\") pod \"certified-operators-7d8zx\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " pod="openshift-marketplace/certified-operators-7d8zx"
Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.561773 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-catalog-content\") pod \"certified-operators-7d8zx\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " pod="openshift-marketplace/certified-operators-7d8zx"
\"kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-catalog-content\") pod \"certified-operators-7d8zx\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " pod="openshift-marketplace/certified-operators-7d8zx" Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.562294 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-utilities\") pod \"certified-operators-7d8zx\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " pod="openshift-marketplace/certified-operators-7d8zx" Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.651645 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdn92\" (UniqueName: \"kubernetes.io/projected/c8affef7-509a-460a-bb56-7957105c9c1b-kube-api-access-wdn92\") pod \"certified-operators-7d8zx\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " pod="openshift-marketplace/certified-operators-7d8zx" Nov 30 09:10:21 crc kubenswrapper[4941]: I1130 09:10:21.729885 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7d8zx" Nov 30 09:10:22 crc kubenswrapper[4941]: I1130 09:10:22.437672 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7d8zx"] Nov 30 09:10:22 crc kubenswrapper[4941]: W1130 09:10:22.454303 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8affef7_509a_460a_bb56_7957105c9c1b.slice/crio-17831d6606f410de3229f0c841624ee5f261877e3a49a2e9dc9439da1072936a WatchSource:0}: Error finding container 17831d6606f410de3229f0c841624ee5f261877e3a49a2e9dc9439da1072936a: Status 404 returned error can't find the container with id 17831d6606f410de3229f0c841624ee5f261877e3a49a2e9dc9439da1072936a Nov 30 09:10:23 crc kubenswrapper[4941]: I1130 09:10:23.373672 4941 generic.go:334] "Generic (PLEG): container finished" podID="c8affef7-509a-460a-bb56-7957105c9c1b" containerID="2122ee0c702ec9d312735e71ea3de1eb1fb481a39eccb25d5fc89f495e218fa1" exitCode=0 Nov 30 09:10:23 crc kubenswrapper[4941]: I1130 09:10:23.373823 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8zx" event={"ID":"c8affef7-509a-460a-bb56-7957105c9c1b","Type":"ContainerDied","Data":"2122ee0c702ec9d312735e71ea3de1eb1fb481a39eccb25d5fc89f495e218fa1"} Nov 30 09:10:23 crc kubenswrapper[4941]: I1130 09:10:23.374183 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8zx" event={"ID":"c8affef7-509a-460a-bb56-7957105c9c1b","Type":"ContainerStarted","Data":"17831d6606f410de3229f0c841624ee5f261877e3a49a2e9dc9439da1072936a"} Nov 30 09:10:23 crc kubenswrapper[4941]: I1130 09:10:23.378027 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ln2g" event={"ID":"fac33ee4-bea8-4039-a127-1195835ab603","Type":"ContainerStarted","Data":"2ef87d7edddd39e0f433242b892678a1264b5ad9ad2a942b726433e4744bc455"} Nov 30 09:10:24 crc kubenswrapper[4941]: I1130 09:10:24.389971 4941 generic.go:334] "Generic (PLEG): container finished" podID="fac33ee4-bea8-4039-a127-1195835ab603" containerID="2ef87d7edddd39e0f433242b892678a1264b5ad9ad2a942b726433e4744bc455" exitCode=0 Nov 30 09:10:24 crc kubenswrapper[4941]: I1130 09:10:24.390141 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ln2g" 
event={"ID":"fac33ee4-bea8-4039-a127-1195835ab603","Type":"ContainerDied","Data":"2ef87d7edddd39e0f433242b892678a1264b5ad9ad2a942b726433e4744bc455"} Nov 30 09:10:25 crc kubenswrapper[4941]: I1130 09:10:25.405883 4941 generic.go:334] "Generic (PLEG): container finished" podID="149c074f-3e86-4f07-b79f-dfb34544be80" containerID="c7bac2b6884047fa132d1d6d90e1a6bd9292b488bdf30c6300166908eccb7138" exitCode=0 Nov 30 09:10:25 crc kubenswrapper[4941]: I1130 09:10:25.405973 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmxqw" event={"ID":"149c074f-3e86-4f07-b79f-dfb34544be80","Type":"ContainerDied","Data":"c7bac2b6884047fa132d1d6d90e1a6bd9292b488bdf30c6300166908eccb7138"} Nov 30 09:10:25 crc kubenswrapper[4941]: I1130 09:10:25.413209 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8zx" event={"ID":"c8affef7-509a-460a-bb56-7957105c9c1b","Type":"ContainerStarted","Data":"d7943b573b2ecb5be9c992921f8e7ccf36231ae51e0f9cf300d597380ca8a063"} Nov 30 09:10:25 crc kubenswrapper[4941]: I1130 09:10:25.416684 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ln2g" event={"ID":"fac33ee4-bea8-4039-a127-1195835ab603","Type":"ContainerStarted","Data":"0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43"} Nov 30 09:10:25 crc kubenswrapper[4941]: I1130 09:10:25.502493 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7ln2g" podStartSLOduration=2.985681295 podStartE2EDuration="6.502462874s" podCreationTimestamp="2025-11-30 09:10:19 +0000 UTC" firstStartedPulling="2025-11-30 09:10:21.354039077 +0000 UTC m=+8642.122210686" lastFinishedPulling="2025-11-30 09:10:24.870820616 +0000 UTC m=+8645.638992265" observedRunningTime="2025-11-30 09:10:25.483781976 +0000 UTC m=+8646.251953575" watchObservedRunningTime="2025-11-30 09:10:25.502462874 +0000 UTC m=+8646.270634483" Nov 30 09:10:26 crc kubenswrapper[4941]: I1130 09:10:26.431346 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmxqw" event={"ID":"149c074f-3e86-4f07-b79f-dfb34544be80","Type":"ContainerStarted","Data":"041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7"} Nov 30 09:10:26 crc kubenswrapper[4941]: I1130 09:10:26.433744 4941 generic.go:334] "Generic (PLEG): container finished" podID="c8affef7-509a-460a-bb56-7957105c9c1b" containerID="d7943b573b2ecb5be9c992921f8e7ccf36231ae51e0f9cf300d597380ca8a063" exitCode=0 Nov 30 09:10:26 crc kubenswrapper[4941]: I1130 09:10:26.433816 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8zx" event={"ID":"c8affef7-509a-460a-bb56-7957105c9c1b","Type":"ContainerDied","Data":"d7943b573b2ecb5be9c992921f8e7ccf36231ae51e0f9cf300d597380ca8a063"} Nov 30 09:10:26 crc kubenswrapper[4941]: I1130 09:10:26.462565 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jmxqw" podStartSLOduration=2.81582184 podStartE2EDuration="8.462541236s" podCreationTimestamp="2025-11-30 09:10:18 +0000 UTC" firstStartedPulling="2025-11-30 09:10:20.306108955 +0000 UTC m=+8641.074280564" lastFinishedPulling="2025-11-30 09:10:25.952828301 +0000 UTC m=+8646.720999960" observedRunningTime="2025-11-30 09:10:26.452611229 +0000 UTC m=+8647.220782838" watchObservedRunningTime="2025-11-30 09:10:26.462541236 +0000 UTC m=+8647.230712845" Nov 30 09:10:27 crc 
kubenswrapper[4941]: I1130 09:10:27.471675 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8zx" event={"ID":"c8affef7-509a-460a-bb56-7957105c9c1b","Type":"ContainerStarted","Data":"0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e"} Nov 30 09:10:27 crc kubenswrapper[4941]: I1130 09:10:27.491426 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7d8zx" podStartSLOduration=2.920432813 podStartE2EDuration="6.491406718s" podCreationTimestamp="2025-11-30 09:10:21 +0000 UTC" firstStartedPulling="2025-11-30 09:10:23.376086456 +0000 UTC m=+8644.144258065" lastFinishedPulling="2025-11-30 09:10:26.947060361 +0000 UTC m=+8647.715231970" observedRunningTime="2025-11-30 09:10:27.488643792 +0000 UTC m=+8648.256815401" watchObservedRunningTime="2025-11-30 09:10:27.491406718 +0000 UTC m=+8648.259578327" Nov 30 09:10:29 crc kubenswrapper[4941]: I1130 09:10:29.316010 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jmxqw" Nov 30 09:10:29 crc kubenswrapper[4941]: I1130 09:10:29.316586 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jmxqw" Nov 30 09:10:29 crc kubenswrapper[4941]: I1130 09:10:29.491994 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7ln2g" Nov 30 09:10:29 crc kubenswrapper[4941]: I1130 09:10:29.492071 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7ln2g" Nov 30 09:10:29 crc kubenswrapper[4941]: I1130 09:10:29.542806 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7ln2g" Nov 30 09:10:30 crc kubenswrapper[4941]: I1130 09:10:30.364353 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jmxqw" podUID="149c074f-3e86-4f07-b79f-dfb34544be80" containerName="registry-server" probeResult="failure" output=< Nov 30 09:10:30 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s Nov 30 09:10:30 crc kubenswrapper[4941]: > Nov 30 09:10:30 crc kubenswrapper[4941]: I1130 09:10:30.565807 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7ln2g" Nov 30 09:10:31 crc kubenswrapper[4941]: I1130 09:10:31.730933 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7d8zx" Nov 30 09:10:31 crc kubenswrapper[4941]: I1130 09:10:31.731387 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7d8zx" Nov 30 09:10:31 crc kubenswrapper[4941]: I1130 09:10:31.788621 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7d8zx" Nov 30 09:10:32 crc kubenswrapper[4941]: I1130 09:10:32.576991 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7d8zx" Nov 30 09:10:32 crc kubenswrapper[4941]: I1130 09:10:32.750415 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ln2g"] Nov 30 09:10:32 crc kubenswrapper[4941]: I1130 09:10:32.750685 4941 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-marketplace/redhat-marketplace-7ln2g" podUID="fac33ee4-bea8-4039-a127-1195835ab603" containerName="registry-server" containerID="cri-o://0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43" gracePeriod=2 Nov 30 09:10:32 crc kubenswrapper[4941]: I1130 09:10:32.979180 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 09:10:32 crc kubenswrapper[4941]: I1130 09:10:32.979981 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.307650 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7ln2g" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.384518 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fac33ee4-bea8-4039-a127-1195835ab603-catalog-content\") pod \"fac33ee4-bea8-4039-a127-1195835ab603\" (UID: \"fac33ee4-bea8-4039-a127-1195835ab603\") " Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.384731 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7g4k\" (UniqueName: \"kubernetes.io/projected/fac33ee4-bea8-4039-a127-1195835ab603-kube-api-access-l7g4k\") pod \"fac33ee4-bea8-4039-a127-1195835ab603\" (UID: \"fac33ee4-bea8-4039-a127-1195835ab603\") " Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.384909 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fac33ee4-bea8-4039-a127-1195835ab603-utilities\") pod \"fac33ee4-bea8-4039-a127-1195835ab603\" (UID: \"fac33ee4-bea8-4039-a127-1195835ab603\") " Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.388258 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fac33ee4-bea8-4039-a127-1195835ab603-utilities" (OuterVolumeSpecName: "utilities") pod "fac33ee4-bea8-4039-a127-1195835ab603" (UID: "fac33ee4-bea8-4039-a127-1195835ab603"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.398139 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fac33ee4-bea8-4039-a127-1195835ab603-kube-api-access-l7g4k" (OuterVolumeSpecName: "kube-api-access-l7g4k") pod "fac33ee4-bea8-4039-a127-1195835ab603" (UID: "fac33ee4-bea8-4039-a127-1195835ab603"). InnerVolumeSpecName "kube-api-access-l7g4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.411689 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fac33ee4-bea8-4039-a127-1195835ab603-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fac33ee4-bea8-4039-a127-1195835ab603" (UID: "fac33ee4-bea8-4039-a127-1195835ab603"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.489406 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fac33ee4-bea8-4039-a127-1195835ab603-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.489454 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7g4k\" (UniqueName: \"kubernetes.io/projected/fac33ee4-bea8-4039-a127-1195835ab603-kube-api-access-l7g4k\") on node \"crc\" DevicePath \"\"" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.489472 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fac33ee4-bea8-4039-a127-1195835ab603-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.540975 4941 generic.go:334] "Generic (PLEG): container finished" podID="fac33ee4-bea8-4039-a127-1195835ab603" containerID="0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43" exitCode=0 Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.541031 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ln2g" event={"ID":"fac33ee4-bea8-4039-a127-1195835ab603","Type":"ContainerDied","Data":"0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43"} Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.541898 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ln2g" event={"ID":"fac33ee4-bea8-4039-a127-1195835ab603","Type":"ContainerDied","Data":"78ff1233020ef613af2243360b845c18b15231131ed248f907971bcc73a7a380"} Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.541112 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7ln2g" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.541956 4941 scope.go:117] "RemoveContainer" containerID="0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.581821 4941 scope.go:117] "RemoveContainer" containerID="2ef87d7edddd39e0f433242b892678a1264b5ad9ad2a942b726433e4744bc455" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.595575 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ln2g"] Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.608982 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ln2g"] Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.612489 4941 scope.go:117] "RemoveContainer" containerID="7dc63ac7e96b0de46c77c523c4fa1379d6728c65c2171ad3f5470a1980c780b0" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.681835 4941 scope.go:117] "RemoveContainer" containerID="0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43" Nov 30 09:10:33 crc kubenswrapper[4941]: E1130 09:10:33.682418 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43\": container with ID starting with 0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43 not found: ID does not exist" containerID="0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.682453 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43"} err="failed to get container status \"0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43\": rpc error: code = NotFound desc = could not find container \"0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43\": container with ID starting with 0795f8e329a03c328a682a771c40a21e0b459b408291cf5eae95a92dea47ce43 not found: ID does not exist" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.682481 4941 scope.go:117] "RemoveContainer" containerID="2ef87d7edddd39e0f433242b892678a1264b5ad9ad2a942b726433e4744bc455" Nov 30 09:10:33 crc kubenswrapper[4941]: E1130 09:10:33.682841 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ef87d7edddd39e0f433242b892678a1264b5ad9ad2a942b726433e4744bc455\": container with ID starting with 2ef87d7edddd39e0f433242b892678a1264b5ad9ad2a942b726433e4744bc455 not found: ID does not exist" containerID="2ef87d7edddd39e0f433242b892678a1264b5ad9ad2a942b726433e4744bc455" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.682867 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ef87d7edddd39e0f433242b892678a1264b5ad9ad2a942b726433e4744bc455"} err="failed to get container status \"2ef87d7edddd39e0f433242b892678a1264b5ad9ad2a942b726433e4744bc455\": rpc error: code = NotFound desc = could not find container \"2ef87d7edddd39e0f433242b892678a1264b5ad9ad2a942b726433e4744bc455\": container with ID starting with 2ef87d7edddd39e0f433242b892678a1264b5ad9ad2a942b726433e4744bc455 not found: ID does not exist" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.682881 4941 scope.go:117] "RemoveContainer" 
containerID="7dc63ac7e96b0de46c77c523c4fa1379d6728c65c2171ad3f5470a1980c780b0" Nov 30 09:10:33 crc kubenswrapper[4941]: E1130 09:10:33.683072 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dc63ac7e96b0de46c77c523c4fa1379d6728c65c2171ad3f5470a1980c780b0\": container with ID starting with 7dc63ac7e96b0de46c77c523c4fa1379d6728c65c2171ad3f5470a1980c780b0 not found: ID does not exist" containerID="7dc63ac7e96b0de46c77c523c4fa1379d6728c65c2171ad3f5470a1980c780b0" Nov 30 09:10:33 crc kubenswrapper[4941]: I1130 09:10:33.683093 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dc63ac7e96b0de46c77c523c4fa1379d6728c65c2171ad3f5470a1980c780b0"} err="failed to get container status \"7dc63ac7e96b0de46c77c523c4fa1379d6728c65c2171ad3f5470a1980c780b0\": rpc error: code = NotFound desc = could not find container \"7dc63ac7e96b0de46c77c523c4fa1379d6728c65c2171ad3f5470a1980c780b0\": container with ID starting with 7dc63ac7e96b0de46c77c523c4fa1379d6728c65c2171ad3f5470a1980c780b0 not found: ID does not exist" Nov 30 09:10:34 crc kubenswrapper[4941]: I1130 09:10:34.154492 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7d8zx"] Nov 30 09:10:34 crc kubenswrapper[4941]: I1130 09:10:34.557437 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7d8zx" podUID="c8affef7-509a-460a-bb56-7957105c9c1b" containerName="registry-server" containerID="cri-o://0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e" gracePeriod=2 Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.134619 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7d8zx" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.247405 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdn92\" (UniqueName: \"kubernetes.io/projected/c8affef7-509a-460a-bb56-7957105c9c1b-kube-api-access-wdn92\") pod \"c8affef7-509a-460a-bb56-7957105c9c1b\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.247773 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-utilities\") pod \"c8affef7-509a-460a-bb56-7957105c9c1b\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.247815 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-catalog-content\") pod \"c8affef7-509a-460a-bb56-7957105c9c1b\" (UID: \"c8affef7-509a-460a-bb56-7957105c9c1b\") " Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.250125 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-utilities" (OuterVolumeSpecName: "utilities") pod "c8affef7-509a-460a-bb56-7957105c9c1b" (UID: "c8affef7-509a-460a-bb56-7957105c9c1b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.256737 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8affef7-509a-460a-bb56-7957105c9c1b-kube-api-access-wdn92" (OuterVolumeSpecName: "kube-api-access-wdn92") pod "c8affef7-509a-460a-bb56-7957105c9c1b" (UID: "c8affef7-509a-460a-bb56-7957105c9c1b"). InnerVolumeSpecName "kube-api-access-wdn92". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.266190 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdn92\" (UniqueName: \"kubernetes.io/projected/c8affef7-509a-460a-bb56-7957105c9c1b-kube-api-access-wdn92\") on node \"crc\" DevicePath \"\"" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.266213 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.307673 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c8affef7-509a-460a-bb56-7957105c9c1b" (UID: "c8affef7-509a-460a-bb56-7957105c9c1b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.368120 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8affef7-509a-460a-bb56-7957105c9c1b-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.542540 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fac33ee4-bea8-4039-a127-1195835ab603" path="/var/lib/kubelet/pods/fac33ee4-bea8-4039-a127-1195835ab603/volumes" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.574901 4941 generic.go:334] "Generic (PLEG): container finished" podID="c8affef7-509a-460a-bb56-7957105c9c1b" containerID="0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e" exitCode=0 Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.574952 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8zx" event={"ID":"c8affef7-509a-460a-bb56-7957105c9c1b","Type":"ContainerDied","Data":"0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e"} Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.574993 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7d8zx" event={"ID":"c8affef7-509a-460a-bb56-7957105c9c1b","Type":"ContainerDied","Data":"17831d6606f410de3229f0c841624ee5f261877e3a49a2e9dc9439da1072936a"} Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.575016 4941 scope.go:117] "RemoveContainer" containerID="0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.575023 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7d8zx" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.606580 4941 scope.go:117] "RemoveContainer" containerID="d7943b573b2ecb5be9c992921f8e7ccf36231ae51e0f9cf300d597380ca8a063" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.607287 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7d8zx"] Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.618132 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7d8zx"] Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.634152 4941 scope.go:117] "RemoveContainer" containerID="2122ee0c702ec9d312735e71ea3de1eb1fb481a39eccb25d5fc89f495e218fa1" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.686287 4941 scope.go:117] "RemoveContainer" containerID="0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e" Nov 30 09:10:35 crc kubenswrapper[4941]: E1130 09:10:35.686881 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e\": container with ID starting with 0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e not found: ID does not exist" containerID="0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.686919 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e"} err="failed to get container status \"0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e\": rpc error: code = NotFound desc = could not find container \"0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e\": container with ID starting with 0648e682232a5d6d7b7242b1adac00d4b77776fd9af6af60728147463ecca94e not found: ID does not exist" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.686943 4941 scope.go:117] "RemoveContainer" containerID="d7943b573b2ecb5be9c992921f8e7ccf36231ae51e0f9cf300d597380ca8a063" Nov 30 09:10:35 crc kubenswrapper[4941]: E1130 09:10:35.687470 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7943b573b2ecb5be9c992921f8e7ccf36231ae51e0f9cf300d597380ca8a063\": container with ID starting with d7943b573b2ecb5be9c992921f8e7ccf36231ae51e0f9cf300d597380ca8a063 not found: ID does not exist" containerID="d7943b573b2ecb5be9c992921f8e7ccf36231ae51e0f9cf300d597380ca8a063" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.687496 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7943b573b2ecb5be9c992921f8e7ccf36231ae51e0f9cf300d597380ca8a063"} err="failed to get container status \"d7943b573b2ecb5be9c992921f8e7ccf36231ae51e0f9cf300d597380ca8a063\": rpc error: code = NotFound desc = could not find container \"d7943b573b2ecb5be9c992921f8e7ccf36231ae51e0f9cf300d597380ca8a063\": container with ID starting with d7943b573b2ecb5be9c992921f8e7ccf36231ae51e0f9cf300d597380ca8a063 not found: ID does not exist" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.687509 4941 scope.go:117] "RemoveContainer" containerID="2122ee0c702ec9d312735e71ea3de1eb1fb481a39eccb25d5fc89f495e218fa1" Nov 30 09:10:35 crc kubenswrapper[4941]: E1130 09:10:35.687894 4941 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2122ee0c702ec9d312735e71ea3de1eb1fb481a39eccb25d5fc89f495e218fa1\": container with ID starting with 2122ee0c702ec9d312735e71ea3de1eb1fb481a39eccb25d5fc89f495e218fa1 not found: ID does not exist" containerID="2122ee0c702ec9d312735e71ea3de1eb1fb481a39eccb25d5fc89f495e218fa1" Nov 30 09:10:35 crc kubenswrapper[4941]: I1130 09:10:35.687919 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2122ee0c702ec9d312735e71ea3de1eb1fb481a39eccb25d5fc89f495e218fa1"} err="failed to get container status \"2122ee0c702ec9d312735e71ea3de1eb1fb481a39eccb25d5fc89f495e218fa1\": rpc error: code = NotFound desc = could not find container \"2122ee0c702ec9d312735e71ea3de1eb1fb481a39eccb25d5fc89f495e218fa1\": container with ID starting with 2122ee0c702ec9d312735e71ea3de1eb1fb481a39eccb25d5fc89f495e218fa1 not found: ID does not exist" Nov 30 09:10:37 crc kubenswrapper[4941]: I1130 09:10:37.558101 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8affef7-509a-460a-bb56-7957105c9c1b" path="/var/lib/kubelet/pods/c8affef7-509a-460a-bb56-7957105c9c1b/volumes" Nov 30 09:10:39 crc kubenswrapper[4941]: I1130 09:10:39.361999 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jmxqw" Nov 30 09:10:39 crc kubenswrapper[4941]: I1130 09:10:39.429211 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jmxqw" Nov 30 09:10:39 crc kubenswrapper[4941]: I1130 09:10:39.753280 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jmxqw"] Nov 30 09:10:40 crc kubenswrapper[4941]: I1130 09:10:40.651234 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jmxqw" podUID="149c074f-3e86-4f07-b79f-dfb34544be80" containerName="registry-server" containerID="cri-o://041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7" gracePeriod=2 Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.255143 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jmxqw" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.364841 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pqn2\" (UniqueName: \"kubernetes.io/projected/149c074f-3e86-4f07-b79f-dfb34544be80-kube-api-access-6pqn2\") pod \"149c074f-3e86-4f07-b79f-dfb34544be80\" (UID: \"149c074f-3e86-4f07-b79f-dfb34544be80\") " Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.365300 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/149c074f-3e86-4f07-b79f-dfb34544be80-utilities\") pod \"149c074f-3e86-4f07-b79f-dfb34544be80\" (UID: \"149c074f-3e86-4f07-b79f-dfb34544be80\") " Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.365744 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/149c074f-3e86-4f07-b79f-dfb34544be80-catalog-content\") pod \"149c074f-3e86-4f07-b79f-dfb34544be80\" (UID: \"149c074f-3e86-4f07-b79f-dfb34544be80\") " Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.366274 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/149c074f-3e86-4f07-b79f-dfb34544be80-utilities" (OuterVolumeSpecName: "utilities") pod "149c074f-3e86-4f07-b79f-dfb34544be80" (UID: "149c074f-3e86-4f07-b79f-dfb34544be80"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.366862 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/149c074f-3e86-4f07-b79f-dfb34544be80-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.374010 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/149c074f-3e86-4f07-b79f-dfb34544be80-kube-api-access-6pqn2" (OuterVolumeSpecName: "kube-api-access-6pqn2") pod "149c074f-3e86-4f07-b79f-dfb34544be80" (UID: "149c074f-3e86-4f07-b79f-dfb34544be80"). InnerVolumeSpecName "kube-api-access-6pqn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.470486 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pqn2\" (UniqueName: \"kubernetes.io/projected/149c074f-3e86-4f07-b79f-dfb34544be80-kube-api-access-6pqn2\") on node \"crc\" DevicePath \"\"" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.504449 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/149c074f-3e86-4f07-b79f-dfb34544be80-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "149c074f-3e86-4f07-b79f-dfb34544be80" (UID: "149c074f-3e86-4f07-b79f-dfb34544be80"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.572798 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/149c074f-3e86-4f07-b79f-dfb34544be80-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.668321 4941 generic.go:334] "Generic (PLEG): container finished" podID="149c074f-3e86-4f07-b79f-dfb34544be80" containerID="041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7" exitCode=0 Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.668415 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmxqw" event={"ID":"149c074f-3e86-4f07-b79f-dfb34544be80","Type":"ContainerDied","Data":"041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7"} Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.668439 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jmxqw" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.668458 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jmxqw" event={"ID":"149c074f-3e86-4f07-b79f-dfb34544be80","Type":"ContainerDied","Data":"5b411470efba313fcbb5bfdb9f5652c7aa81d87874b82a8b68e149f2e12d474b"} Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.668486 4941 scope.go:117] "RemoveContainer" containerID="041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.707709 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jmxqw"] Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.722121 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jmxqw"] Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.724277 4941 scope.go:117] "RemoveContainer" containerID="c7bac2b6884047fa132d1d6d90e1a6bd9292b488bdf30c6300166908eccb7138" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.765063 4941 scope.go:117] "RemoveContainer" containerID="e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.807129 4941 scope.go:117] "RemoveContainer" containerID="041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7" Nov 30 09:10:41 crc kubenswrapper[4941]: E1130 09:10:41.808019 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7\": container with ID starting with 041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7 not found: ID does not exist" containerID="041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.808071 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7"} err="failed to get container status \"041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7\": rpc error: code = NotFound desc = could not find container \"041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7\": container with ID starting with 041718837aae0b3b47ab027fbdafd4c75cc9ea06a037972c8d13556e6ab461a7 not found: ID does not exist" Nov 30 09:10:41 crc 
kubenswrapper[4941]: I1130 09:10:41.808105 4941 scope.go:117] "RemoveContainer" containerID="c7bac2b6884047fa132d1d6d90e1a6bd9292b488bdf30c6300166908eccb7138" Nov 30 09:10:41 crc kubenswrapper[4941]: E1130 09:10:41.808817 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7bac2b6884047fa132d1d6d90e1a6bd9292b488bdf30c6300166908eccb7138\": container with ID starting with c7bac2b6884047fa132d1d6d90e1a6bd9292b488bdf30c6300166908eccb7138 not found: ID does not exist" containerID="c7bac2b6884047fa132d1d6d90e1a6bd9292b488bdf30c6300166908eccb7138" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.808863 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7bac2b6884047fa132d1d6d90e1a6bd9292b488bdf30c6300166908eccb7138"} err="failed to get container status \"c7bac2b6884047fa132d1d6d90e1a6bd9292b488bdf30c6300166908eccb7138\": rpc error: code = NotFound desc = could not find container \"c7bac2b6884047fa132d1d6d90e1a6bd9292b488bdf30c6300166908eccb7138\": container with ID starting with c7bac2b6884047fa132d1d6d90e1a6bd9292b488bdf30c6300166908eccb7138 not found: ID does not exist" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.808896 4941 scope.go:117] "RemoveContainer" containerID="e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c" Nov 30 09:10:41 crc kubenswrapper[4941]: E1130 09:10:41.809495 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c\": container with ID starting with e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c not found: ID does not exist" containerID="e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c" Nov 30 09:10:41 crc kubenswrapper[4941]: I1130 09:10:41.809560 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c"} err="failed to get container status \"e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c\": rpc error: code = NotFound desc = could not find container \"e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c\": container with ID starting with e07ebe6d8eb610e3a076d4486bfd1a119c98908afbdbfa09f9e02d7ee3ee982c not found: ID does not exist" Nov 30 09:10:43 crc kubenswrapper[4941]: I1130 09:10:43.546055 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="149c074f-3e86-4f07-b79f-dfb34544be80" path="/var/lib/kubelet/pods/149c074f-3e86-4f07-b79f-dfb34544be80/volumes" Nov 30 09:11:02 crc kubenswrapper[4941]: I1130 09:11:02.978595 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 09:11:02 crc kubenswrapper[4941]: I1130 09:11:02.979529 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 09:11:02 crc kubenswrapper[4941]: I1130 09:11:02.979615 4941 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 09:11:02 crc kubenswrapper[4941]: I1130 09:11:02.981076 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2e6f1e45c1cce03f2da53576726ab95a81ab52ffdf8333da897df5d8ea62ad43"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 09:11:02 crc kubenswrapper[4941]: I1130 09:11:02.981218 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://2e6f1e45c1cce03f2da53576726ab95a81ab52ffdf8333da897df5d8ea62ad43" gracePeriod=600 Nov 30 09:11:03 crc kubenswrapper[4941]: I1130 09:11:03.976090 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="2e6f1e45c1cce03f2da53576726ab95a81ab52ffdf8333da897df5d8ea62ad43" exitCode=0 Nov 30 09:11:03 crc kubenswrapper[4941]: I1130 09:11:03.976174 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"2e6f1e45c1cce03f2da53576726ab95a81ab52ffdf8333da897df5d8ea62ad43"} Nov 30 09:11:03 crc kubenswrapper[4941]: I1130 09:11:03.976899 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce"} Nov 30 09:11:03 crc kubenswrapper[4941]: I1130 09:11:03.976947 4941 scope.go:117] "RemoveContainer" containerID="8b63f1dc301a4aeabaa51607c0a0688b160dfc86bae3ea156d13fdca3d38a162" Nov 30 09:13:23 crc kubenswrapper[4941]: I1130 09:13:23.796635 4941 generic.go:334] "Generic (PLEG): container finished" podID="db6bcef0-5db9-4007-a09c-1435e4d37b48" containerID="7f59994dd6e1603d4cc3c24d9dd5613d6f5ac9717da703ef83c0fa27fc60bc47" exitCode=0 Nov 30 09:13:23 crc kubenswrapper[4941]: I1130 09:13:23.796731 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" event={"ID":"db6bcef0-5db9-4007-a09c-1435e4d37b48","Type":"ContainerDied","Data":"7f59994dd6e1603d4cc3c24d9dd5613d6f5ac9717da703ef83c0fa27fc60bc47"} Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.353435 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.426147 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-compute-config-1\") pod \"db6bcef0-5db9-4007-a09c-1435e4d37b48\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.426443 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-combined-ca-bundle\") pod \"db6bcef0-5db9-4007-a09c-1435e4d37b48\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.426558 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-ceph\") pod \"db6bcef0-5db9-4007-a09c-1435e4d37b48\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.426669 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cells-global-config-1\") pod \"db6bcef0-5db9-4007-a09c-1435e4d37b48\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.426867 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-compute-config-0\") pod \"db6bcef0-5db9-4007-a09c-1435e4d37b48\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.426958 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-ssh-key\") pod \"db6bcef0-5db9-4007-a09c-1435e4d37b48\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.427111 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxncn\" (UniqueName: \"kubernetes.io/projected/db6bcef0-5db9-4007-a09c-1435e4d37b48-kube-api-access-kxncn\") pod \"db6bcef0-5db9-4007-a09c-1435e4d37b48\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.427243 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cells-global-config-0\") pod \"db6bcef0-5db9-4007-a09c-1435e4d37b48\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.428067 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-migration-ssh-key-1\") pod \"db6bcef0-5db9-4007-a09c-1435e4d37b48\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.428206 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-inventory\") pod \"db6bcef0-5db9-4007-a09c-1435e4d37b48\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.428385 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-migration-ssh-key-0\") pod \"db6bcef0-5db9-4007-a09c-1435e4d37b48\" (UID: \"db6bcef0-5db9-4007-a09c-1435e4d37b48\") " Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.435189 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "db6bcef0-5db9-4007-a09c-1435e4d37b48" (UID: "db6bcef0-5db9-4007-a09c-1435e4d37b48"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.435604 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-ceph" (OuterVolumeSpecName: "ceph") pod "db6bcef0-5db9-4007-a09c-1435e4d37b48" (UID: "db6bcef0-5db9-4007-a09c-1435e4d37b48"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.436568 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db6bcef0-5db9-4007-a09c-1435e4d37b48-kube-api-access-kxncn" (OuterVolumeSpecName: "kube-api-access-kxncn") pod "db6bcef0-5db9-4007-a09c-1435e4d37b48" (UID: "db6bcef0-5db9-4007-a09c-1435e4d37b48"). InnerVolumeSpecName "kube-api-access-kxncn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.463526 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "db6bcef0-5db9-4007-a09c-1435e4d37b48" (UID: "db6bcef0-5db9-4007-a09c-1435e4d37b48"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.477902 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "db6bcef0-5db9-4007-a09c-1435e4d37b48" (UID: "db6bcef0-5db9-4007-a09c-1435e4d37b48"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.496793 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "db6bcef0-5db9-4007-a09c-1435e4d37b48" (UID: "db6bcef0-5db9-4007-a09c-1435e4d37b48"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.499406 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "db6bcef0-5db9-4007-a09c-1435e4d37b48" (UID: "db6bcef0-5db9-4007-a09c-1435e4d37b48"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.501204 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "db6bcef0-5db9-4007-a09c-1435e4d37b48" (UID: "db6bcef0-5db9-4007-a09c-1435e4d37b48"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.504816 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-inventory" (OuterVolumeSpecName: "inventory") pod "db6bcef0-5db9-4007-a09c-1435e4d37b48" (UID: "db6bcef0-5db9-4007-a09c-1435e4d37b48"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.511597 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "db6bcef0-5db9-4007-a09c-1435e4d37b48" (UID: "db6bcef0-5db9-4007-a09c-1435e4d37b48"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.528592 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "db6bcef0-5db9-4007-a09c-1435e4d37b48" (UID: "db6bcef0-5db9-4007-a09c-1435e4d37b48"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.530965 4941 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.531011 4941 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-ceph\") on node \"crc\" DevicePath \"\"" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.531026 4941 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.531039 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.531053 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxncn\" (UniqueName: \"kubernetes.io/projected/db6bcef0-5db9-4007-a09c-1435e4d37b48-kube-api-access-kxncn\") on node \"crc\" DevicePath \"\"" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.531067 4941 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.531081 4941 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.531094 4941 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-inventory\") on node \"crc\" DevicePath \"\"" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.531107 4941 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.531120 4941 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.531133 4941 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/db6bcef0-5db9-4007-a09c-1435e4d37b48-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.821696 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" event={"ID":"db6bcef0-5db9-4007-a09c-1435e4d37b48","Type":"ContainerDied","Data":"2dc459a1294c84c59c787ce54c9c6694a85d194dc84af4c9fa13b0db71a09a2e"} Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.821746 4941 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dc459a1294c84c59c787ce54c9c6694a85d194dc84af4c9fa13b0db71a09a2e" Nov 30 09:13:25 crc kubenswrapper[4941]: I1130 09:13:25.821781 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf" Nov 30 09:13:32 crc kubenswrapper[4941]: I1130 09:13:32.980056 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 09:13:32 crc kubenswrapper[4941]: I1130 09:13:32.980573 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 09:14:02 crc kubenswrapper[4941]: I1130 09:14:02.979575 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 09:14:02 crc kubenswrapper[4941]: I1130 09:14:02.980616 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 09:14:32 crc kubenswrapper[4941]: I1130 09:14:32.978614 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 09:14:32 crc kubenswrapper[4941]: I1130 09:14:32.979263 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 09:14:32 crc kubenswrapper[4941]: I1130 09:14:32.979318 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" Nov 30 09:14:32 crc kubenswrapper[4941]: I1130 09:14:32.980111 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 30 09:14:32 crc kubenswrapper[4941]: I1130 09:14:32.980184 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" 
containerID="cri-o://03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" gracePeriod=600 Nov 30 09:14:33 crc kubenswrapper[4941]: E1130 09:14:33.103197 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:14:33 crc kubenswrapper[4941]: I1130 09:14:33.665942 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" exitCode=0 Nov 30 09:14:33 crc kubenswrapper[4941]: I1130 09:14:33.666065 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce"} Nov 30 09:14:33 crc kubenswrapper[4941]: I1130 09:14:33.666611 4941 scope.go:117] "RemoveContainer" containerID="2e6f1e45c1cce03f2da53576726ab95a81ab52ffdf8333da897df5d8ea62ad43" Nov 30 09:14:33 crc kubenswrapper[4941]: I1130 09:14:33.667934 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:14:33 crc kubenswrapper[4941]: E1130 09:14:33.668493 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:14:47 crc kubenswrapper[4941]: I1130 09:14:47.523010 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:14:47 crc kubenswrapper[4941]: E1130 09:14:47.524507 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:14:59 crc kubenswrapper[4941]: I1130 09:14:59.531252 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:14:59 crc kubenswrapper[4941]: E1130 09:14:59.532563 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.162128 4941 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf"] Nov 30 09:15:00 crc kubenswrapper[4941]: E1130 09:15:00.162758 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db6bcef0-5db9-4007-a09c-1435e4d37b48" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.162784 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="db6bcef0-5db9-4007-a09c-1435e4d37b48" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Nov 30 09:15:00 crc kubenswrapper[4941]: E1130 09:15:00.162834 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fac33ee4-bea8-4039-a127-1195835ab603" containerName="extract-utilities" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.162842 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fac33ee4-bea8-4039-a127-1195835ab603" containerName="extract-utilities" Nov 30 09:15:00 crc kubenswrapper[4941]: E1130 09:15:00.162856 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8affef7-509a-460a-bb56-7957105c9c1b" containerName="extract-content" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.162863 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8affef7-509a-460a-bb56-7957105c9c1b" containerName="extract-content" Nov 30 09:15:00 crc kubenswrapper[4941]: E1130 09:15:00.162885 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8affef7-509a-460a-bb56-7957105c9c1b" containerName="registry-server" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.162891 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8affef7-509a-460a-bb56-7957105c9c1b" containerName="registry-server" Nov 30 09:15:00 crc kubenswrapper[4941]: E1130 09:15:00.162908 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="149c074f-3e86-4f07-b79f-dfb34544be80" containerName="extract-utilities" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.162917 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="149c074f-3e86-4f07-b79f-dfb34544be80" containerName="extract-utilities" Nov 30 09:15:00 crc kubenswrapper[4941]: E1130 09:15:00.162929 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8affef7-509a-460a-bb56-7957105c9c1b" containerName="extract-utilities" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.162936 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8affef7-509a-460a-bb56-7957105c9c1b" containerName="extract-utilities" Nov 30 09:15:00 crc kubenswrapper[4941]: E1130 09:15:00.162977 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fac33ee4-bea8-4039-a127-1195835ab603" containerName="registry-server" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.162984 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fac33ee4-bea8-4039-a127-1195835ab603" containerName="registry-server" Nov 30 09:15:00 crc kubenswrapper[4941]: E1130 09:15:00.162995 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="149c074f-3e86-4f07-b79f-dfb34544be80" containerName="registry-server" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.163001 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="149c074f-3e86-4f07-b79f-dfb34544be80" containerName="registry-server" Nov 30 09:15:00 crc kubenswrapper[4941]: E1130 09:15:00.163017 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fac33ee4-bea8-4039-a127-1195835ab603" 
containerName="extract-content" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.163024 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="fac33ee4-bea8-4039-a127-1195835ab603" containerName="extract-content" Nov 30 09:15:00 crc kubenswrapper[4941]: E1130 09:15:00.163036 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="149c074f-3e86-4f07-b79f-dfb34544be80" containerName="extract-content" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.163042 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="149c074f-3e86-4f07-b79f-dfb34544be80" containerName="extract-content" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.163275 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="149c074f-3e86-4f07-b79f-dfb34544be80" containerName="registry-server" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.163295 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="db6bcef0-5db9-4007-a09c-1435e4d37b48" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.163311 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="fac33ee4-bea8-4039-a127-1195835ab603" containerName="registry-server" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.163341 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8affef7-509a-460a-bb56-7957105c9c1b" containerName="registry-server" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.164238 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.167105 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.169461 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.174904 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf"] Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.250051 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8719466c-c0a2-4d59-95cc-858918035a87-secret-volume\") pod \"collect-profiles-29408235-fr5hf\" (UID: \"8719466c-c0a2-4d59-95cc-858918035a87\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.250802 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksn4t\" (UniqueName: \"kubernetes.io/projected/8719466c-c0a2-4d59-95cc-858918035a87-kube-api-access-ksn4t\") pod \"collect-profiles-29408235-fr5hf\" (UID: \"8719466c-c0a2-4d59-95cc-858918035a87\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.251276 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8719466c-c0a2-4d59-95cc-858918035a87-config-volume\") pod \"collect-profiles-29408235-fr5hf\" (UID: \"8719466c-c0a2-4d59-95cc-858918035a87\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.353221 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8719466c-c0a2-4d59-95cc-858918035a87-secret-volume\") pod \"collect-profiles-29408235-fr5hf\" (UID: \"8719466c-c0a2-4d59-95cc-858918035a87\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.353288 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksn4t\" (UniqueName: \"kubernetes.io/projected/8719466c-c0a2-4d59-95cc-858918035a87-kube-api-access-ksn4t\") pod \"collect-profiles-29408235-fr5hf\" (UID: \"8719466c-c0a2-4d59-95cc-858918035a87\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.353390 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8719466c-c0a2-4d59-95cc-858918035a87-config-volume\") pod \"collect-profiles-29408235-fr5hf\" (UID: \"8719466c-c0a2-4d59-95cc-858918035a87\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.354522 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8719466c-c0a2-4d59-95cc-858918035a87-config-volume\") pod \"collect-profiles-29408235-fr5hf\" (UID: \"8719466c-c0a2-4d59-95cc-858918035a87\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.360657 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8719466c-c0a2-4d59-95cc-858918035a87-secret-volume\") pod \"collect-profiles-29408235-fr5hf\" (UID: \"8719466c-c0a2-4d59-95cc-858918035a87\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.371526 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksn4t\" (UniqueName: \"kubernetes.io/projected/8719466c-c0a2-4d59-95cc-858918035a87-kube-api-access-ksn4t\") pod \"collect-profiles-29408235-fr5hf\" (UID: \"8719466c-c0a2-4d59-95cc-858918035a87\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:00 crc kubenswrapper[4941]: I1130 09:15:00.502115 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:01 crc kubenswrapper[4941]: I1130 09:15:01.042484 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf"] Nov 30 09:15:02 crc kubenswrapper[4941]: I1130 09:15:02.054684 4941 generic.go:334] "Generic (PLEG): container finished" podID="8719466c-c0a2-4d59-95cc-858918035a87" containerID="c9c7d76094f3d86501e801e0b5f687fcb1085b08530238479b9bb3b092e219b0" exitCode=0 Nov 30 09:15:02 crc kubenswrapper[4941]: I1130 09:15:02.054772 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" event={"ID":"8719466c-c0a2-4d59-95cc-858918035a87","Type":"ContainerDied","Data":"c9c7d76094f3d86501e801e0b5f687fcb1085b08530238479b9bb3b092e219b0"} Nov 30 09:15:02 crc kubenswrapper[4941]: I1130 09:15:02.056551 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" event={"ID":"8719466c-c0a2-4d59-95cc-858918035a87","Type":"ContainerStarted","Data":"5e73b996d7458cff8f991cd30df828fab971415386c65e3d9001b243d632d979"} Nov 30 09:15:03 crc kubenswrapper[4941]: I1130 09:15:03.583998 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:03 crc kubenswrapper[4941]: I1130 09:15:03.630480 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8719466c-c0a2-4d59-95cc-858918035a87-secret-volume\") pod \"8719466c-c0a2-4d59-95cc-858918035a87\" (UID: \"8719466c-c0a2-4d59-95cc-858918035a87\") " Nov 30 09:15:03 crc kubenswrapper[4941]: I1130 09:15:03.630615 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8719466c-c0a2-4d59-95cc-858918035a87-config-volume\") pod \"8719466c-c0a2-4d59-95cc-858918035a87\" (UID: \"8719466c-c0a2-4d59-95cc-858918035a87\") " Nov 30 09:15:03 crc kubenswrapper[4941]: I1130 09:15:03.630675 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksn4t\" (UniqueName: \"kubernetes.io/projected/8719466c-c0a2-4d59-95cc-858918035a87-kube-api-access-ksn4t\") pod \"8719466c-c0a2-4d59-95cc-858918035a87\" (UID: \"8719466c-c0a2-4d59-95cc-858918035a87\") " Nov 30 09:15:03 crc kubenswrapper[4941]: I1130 09:15:03.632582 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8719466c-c0a2-4d59-95cc-858918035a87-config-volume" (OuterVolumeSpecName: "config-volume") pod "8719466c-c0a2-4d59-95cc-858918035a87" (UID: "8719466c-c0a2-4d59-95cc-858918035a87"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 09:15:03 crc kubenswrapper[4941]: I1130 09:15:03.633081 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8719466c-c0a2-4d59-95cc-858918035a87-config-volume\") on node \"crc\" DevicePath \"\"" Nov 30 09:15:03 crc kubenswrapper[4941]: I1130 09:15:03.639841 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8719466c-c0a2-4d59-95cc-858918035a87-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8719466c-c0a2-4d59-95cc-858918035a87" (UID: "8719466c-c0a2-4d59-95cc-858918035a87"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:15:03 crc kubenswrapper[4941]: I1130 09:15:03.640167 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8719466c-c0a2-4d59-95cc-858918035a87-kube-api-access-ksn4t" (OuterVolumeSpecName: "kube-api-access-ksn4t") pod "8719466c-c0a2-4d59-95cc-858918035a87" (UID: "8719466c-c0a2-4d59-95cc-858918035a87"). InnerVolumeSpecName "kube-api-access-ksn4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:15:03 crc kubenswrapper[4941]: I1130 09:15:03.734722 4941 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8719466c-c0a2-4d59-95cc-858918035a87-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 30 09:15:03 crc kubenswrapper[4941]: I1130 09:15:03.734761 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksn4t\" (UniqueName: \"kubernetes.io/projected/8719466c-c0a2-4d59-95cc-858918035a87-kube-api-access-ksn4t\") on node \"crc\" DevicePath \"\"" Nov 30 09:15:04 crc kubenswrapper[4941]: I1130 09:15:04.088852 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" event={"ID":"8719466c-c0a2-4d59-95cc-858918035a87","Type":"ContainerDied","Data":"5e73b996d7458cff8f991cd30df828fab971415386c65e3d9001b243d632d979"} Nov 30 09:15:04 crc kubenswrapper[4941]: I1130 09:15:04.088907 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e73b996d7458cff8f991cd30df828fab971415386c65e3d9001b243d632d979" Nov 30 09:15:04 crc kubenswrapper[4941]: I1130 09:15:04.088964 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408235-fr5hf" Nov 30 09:15:04 crc kubenswrapper[4941]: I1130 09:15:04.683643 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl"] Nov 30 09:15:04 crc kubenswrapper[4941]: I1130 09:15:04.694665 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408190-zcdcl"] Nov 30 09:15:05 crc kubenswrapper[4941]: I1130 09:15:05.541071 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d" path="/var/lib/kubelet/pods/7e4aa5e4-86a4-4e41-abbe-8eeac5f5597d/volumes" Nov 30 09:15:13 crc kubenswrapper[4941]: I1130 09:15:13.522625 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:15:13 crc kubenswrapper[4941]: E1130 09:15:13.524010 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.041140 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pdc6w"] Nov 30 09:15:14 crc kubenswrapper[4941]: E1130 09:15:14.042155 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8719466c-c0a2-4d59-95cc-858918035a87" containerName="collect-profiles" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.042190 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8719466c-c0a2-4d59-95cc-858918035a87" containerName="collect-profiles" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.042685 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="8719466c-c0a2-4d59-95cc-858918035a87" containerName="collect-profiles" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.045847 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.075517 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pdc6w"] Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.130049 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bktbs\" (UniqueName: \"kubernetes.io/projected/74a3cab8-4459-4346-9bc5-6e71fd726b3e-kube-api-access-bktbs\") pod \"community-operators-pdc6w\" (UID: \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\") " pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.130199 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74a3cab8-4459-4346-9bc5-6e71fd726b3e-utilities\") pod \"community-operators-pdc6w\" (UID: \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\") " pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.130341 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74a3cab8-4459-4346-9bc5-6e71fd726b3e-catalog-content\") pod \"community-operators-pdc6w\" (UID: \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\") " pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.231369 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bktbs\" (UniqueName: \"kubernetes.io/projected/74a3cab8-4459-4346-9bc5-6e71fd726b3e-kube-api-access-bktbs\") pod \"community-operators-pdc6w\" (UID: \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\") " pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.231494 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74a3cab8-4459-4346-9bc5-6e71fd726b3e-utilities\") pod \"community-operators-pdc6w\" (UID: \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\") " pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.231598 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74a3cab8-4459-4346-9bc5-6e71fd726b3e-catalog-content\") pod \"community-operators-pdc6w\" (UID: \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\") " pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.232043 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74a3cab8-4459-4346-9bc5-6e71fd726b3e-utilities\") pod \"community-operators-pdc6w\" (UID: \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\") " pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.232532 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74a3cab8-4459-4346-9bc5-6e71fd726b3e-catalog-content\") pod \"community-operators-pdc6w\" (UID: \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\") " pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.590478 4941 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bktbs\" (UniqueName: \"kubernetes.io/projected/74a3cab8-4459-4346-9bc5-6e71fd726b3e-kube-api-access-bktbs\") pod \"community-operators-pdc6w\" (UID: \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\") " pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:14 crc kubenswrapper[4941]: I1130 09:15:14.678005 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:15 crc kubenswrapper[4941]: I1130 09:15:15.201783 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pdc6w"] Nov 30 09:15:15 crc kubenswrapper[4941]: I1130 09:15:15.234011 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pdc6w" event={"ID":"74a3cab8-4459-4346-9bc5-6e71fd726b3e","Type":"ContainerStarted","Data":"5d7e41c55bf9a7e0ac21c4d15aa191dc275336aa90642078d9690cdede316beb"} Nov 30 09:15:16 crc kubenswrapper[4941]: I1130 09:15:16.249991 4941 generic.go:334] "Generic (PLEG): container finished" podID="74a3cab8-4459-4346-9bc5-6e71fd726b3e" containerID="25ef59caf7f417f11f4906dd36df41d250a6d91cb7f2fd0e70f608f94323f63e" exitCode=0 Nov 30 09:15:16 crc kubenswrapper[4941]: I1130 09:15:16.250250 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pdc6w" event={"ID":"74a3cab8-4459-4346-9bc5-6e71fd726b3e","Type":"ContainerDied","Data":"25ef59caf7f417f11f4906dd36df41d250a6d91cb7f2fd0e70f608f94323f63e"} Nov 30 09:15:17 crc kubenswrapper[4941]: I1130 09:15:17.266942 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pdc6w" event={"ID":"74a3cab8-4459-4346-9bc5-6e71fd726b3e","Type":"ContainerStarted","Data":"c82768d4784b0d6a4ce8f487aa864e6826a490a58d73930bf0c752a715c0f4e8"} Nov 30 09:15:18 crc kubenswrapper[4941]: I1130 09:15:18.282996 4941 generic.go:334] "Generic (PLEG): container finished" podID="74a3cab8-4459-4346-9bc5-6e71fd726b3e" containerID="c82768d4784b0d6a4ce8f487aa864e6826a490a58d73930bf0c752a715c0f4e8" exitCode=0 Nov 30 09:15:18 crc kubenswrapper[4941]: I1130 09:15:18.283087 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pdc6w" event={"ID":"74a3cab8-4459-4346-9bc5-6e71fd726b3e","Type":"ContainerDied","Data":"c82768d4784b0d6a4ce8f487aa864e6826a490a58d73930bf0c752a715c0f4e8"} Nov 30 09:15:19 crc kubenswrapper[4941]: I1130 09:15:19.300927 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pdc6w" event={"ID":"74a3cab8-4459-4346-9bc5-6e71fd726b3e","Type":"ContainerStarted","Data":"30072f1f5959556b820356a54e2d0d1eb7b5c8df3bfe1f73b379f7a7676e8960"} Nov 30 09:15:19 crc kubenswrapper[4941]: I1130 09:15:19.336258 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pdc6w" podStartSLOduration=2.863423262 podStartE2EDuration="5.33622562s" podCreationTimestamp="2025-11-30 09:15:14 +0000 UTC" firstStartedPulling="2025-11-30 09:15:16.253408223 +0000 UTC m=+8937.021579842" lastFinishedPulling="2025-11-30 09:15:18.726210591 +0000 UTC m=+8939.494382200" observedRunningTime="2025-11-30 09:15:19.321524866 +0000 UTC m=+8940.089696485" watchObservedRunningTime="2025-11-30 09:15:19.33622562 +0000 UTC m=+8940.104397249" Nov 30 09:15:24 crc kubenswrapper[4941]: I1130 09:15:24.678984 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:24 crc kubenswrapper[4941]: I1130 09:15:24.679884 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:24 crc kubenswrapper[4941]: I1130 09:15:24.735260 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:25 crc kubenswrapper[4941]: I1130 09:15:25.435707 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:26 crc kubenswrapper[4941]: I1130 09:15:26.830125 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pdc6w"] Nov 30 09:15:27 crc kubenswrapper[4941]: I1130 09:15:27.411284 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pdc6w" podUID="74a3cab8-4459-4346-9bc5-6e71fd726b3e" containerName="registry-server" containerID="cri-o://30072f1f5959556b820356a54e2d0d1eb7b5c8df3bfe1f73b379f7a7676e8960" gracePeriod=2 Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.439863 4941 generic.go:334] "Generic (PLEG): container finished" podID="74a3cab8-4459-4346-9bc5-6e71fd726b3e" containerID="30072f1f5959556b820356a54e2d0d1eb7b5c8df3bfe1f73b379f7a7676e8960" exitCode=0 Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.440405 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pdc6w" event={"ID":"74a3cab8-4459-4346-9bc5-6e71fd726b3e","Type":"ContainerDied","Data":"30072f1f5959556b820356a54e2d0d1eb7b5c8df3bfe1f73b379f7a7676e8960"} Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.522249 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:15:28 crc kubenswrapper[4941]: E1130 09:15:28.522910 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.568452 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.656268 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74a3cab8-4459-4346-9bc5-6e71fd726b3e-catalog-content\") pod \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\" (UID: \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\") " Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.656474 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bktbs\" (UniqueName: \"kubernetes.io/projected/74a3cab8-4459-4346-9bc5-6e71fd726b3e-kube-api-access-bktbs\") pod \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\" (UID: \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\") " Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.656506 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74a3cab8-4459-4346-9bc5-6e71fd726b3e-utilities\") pod \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\" (UID: \"74a3cab8-4459-4346-9bc5-6e71fd726b3e\") " Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.658738 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74a3cab8-4459-4346-9bc5-6e71fd726b3e-utilities" (OuterVolumeSpecName: "utilities") pod "74a3cab8-4459-4346-9bc5-6e71fd726b3e" (UID: "74a3cab8-4459-4346-9bc5-6e71fd726b3e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.665973 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74a3cab8-4459-4346-9bc5-6e71fd726b3e-kube-api-access-bktbs" (OuterVolumeSpecName: "kube-api-access-bktbs") pod "74a3cab8-4459-4346-9bc5-6e71fd726b3e" (UID: "74a3cab8-4459-4346-9bc5-6e71fd726b3e"). InnerVolumeSpecName "kube-api-access-bktbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.722348 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74a3cab8-4459-4346-9bc5-6e71fd726b3e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "74a3cab8-4459-4346-9bc5-6e71fd726b3e" (UID: "74a3cab8-4459-4346-9bc5-6e71fd726b3e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.760934 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bktbs\" (UniqueName: \"kubernetes.io/projected/74a3cab8-4459-4346-9bc5-6e71fd726b3e-kube-api-access-bktbs\") on node \"crc\" DevicePath \"\"" Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.760990 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74a3cab8-4459-4346-9bc5-6e71fd726b3e-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 09:15:28 crc kubenswrapper[4941]: I1130 09:15:28.761016 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74a3cab8-4459-4346-9bc5-6e71fd726b3e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 09:15:29 crc kubenswrapper[4941]: I1130 09:15:29.455396 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pdc6w" event={"ID":"74a3cab8-4459-4346-9bc5-6e71fd726b3e","Type":"ContainerDied","Data":"5d7e41c55bf9a7e0ac21c4d15aa191dc275336aa90642078d9690cdede316beb"} Nov 30 09:15:29 crc kubenswrapper[4941]: I1130 09:15:29.455487 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pdc6w" Nov 30 09:15:29 crc kubenswrapper[4941]: I1130 09:15:29.455768 4941 scope.go:117] "RemoveContainer" containerID="30072f1f5959556b820356a54e2d0d1eb7b5c8df3bfe1f73b379f7a7676e8960" Nov 30 09:15:29 crc kubenswrapper[4941]: I1130 09:15:29.491836 4941 scope.go:117] "RemoveContainer" containerID="c82768d4784b0d6a4ce8f487aa864e6826a490a58d73930bf0c752a715c0f4e8" Nov 30 09:15:29 crc kubenswrapper[4941]: I1130 09:15:29.498771 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pdc6w"] Nov 30 09:15:29 crc kubenswrapper[4941]: I1130 09:15:29.521306 4941 scope.go:117] "RemoveContainer" containerID="25ef59caf7f417f11f4906dd36df41d250a6d91cb7f2fd0e70f608f94323f63e" Nov 30 09:15:29 crc kubenswrapper[4941]: I1130 09:15:29.554862 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pdc6w"] Nov 30 09:15:31 crc kubenswrapper[4941]: I1130 09:15:31.539350 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74a3cab8-4459-4346-9bc5-6e71fd726b3e" path="/var/lib/kubelet/pods/74a3cab8-4459-4346-9bc5-6e71fd726b3e/volumes" Nov 30 09:15:39 crc kubenswrapper[4941]: I1130 09:15:39.343739 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Nov 30 09:15:39 crc kubenswrapper[4941]: I1130 09:15:39.345047 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-copy-data" podUID="1e7d4833-4d03-4e88-af2b-dbba79cf7cd0" containerName="adoption" containerID="cri-o://b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae" gracePeriod=30 Nov 30 09:15:43 crc kubenswrapper[4941]: I1130 09:15:43.523930 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:15:43 crc kubenswrapper[4941]: E1130 09:15:43.524734 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:15:53 crc kubenswrapper[4941]: I1130 09:15:53.871405 4941 scope.go:117] "RemoveContainer" containerID="b28fd70c1644afe1ec19026a74f9edf9da3e058501a6e272f7813788fe1fb56f" Nov 30 09:15:55 crc kubenswrapper[4941]: I1130 09:15:55.522830 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:15:55 crc kubenswrapper[4941]: E1130 09:15:55.523558 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:16:07 crc kubenswrapper[4941]: I1130 09:16:07.521848 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:16:07 crc kubenswrapper[4941]: E1130 09:16:07.523038 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:16:09 crc kubenswrapper[4941]: I1130 09:16:09.915567 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Nov 30 09:16:09 crc kubenswrapper[4941]: I1130 09:16:09.933957 4941 generic.go:334] "Generic (PLEG): container finished" podID="1e7d4833-4d03-4e88-af2b-dbba79cf7cd0" containerID="b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae" exitCode=137 Nov 30 09:16:09 crc kubenswrapper[4941]: I1130 09:16:09.934005 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0","Type":"ContainerDied","Data":"b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae"} Nov 30 09:16:09 crc kubenswrapper[4941]: I1130 09:16:09.934043 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0","Type":"ContainerDied","Data":"e65cc1189defc74db3b9dd09251dd439c51977d6c768210224c1802e2fa09c27"} Nov 30 09:16:09 crc kubenswrapper[4941]: I1130 09:16:09.934065 4941 scope.go:117] "RemoveContainer" containerID="b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae" Nov 30 09:16:09 crc kubenswrapper[4941]: I1130 09:16:09.934113 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Nov 30 09:16:09 crc kubenswrapper[4941]: I1130 09:16:09.969457 4941 scope.go:117] "RemoveContainer" containerID="b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae" Nov 30 09:16:09 crc kubenswrapper[4941]: E1130 09:16:09.970031 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae\": container with ID starting with b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae not found: ID does not exist" containerID="b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae" Nov 30 09:16:09 crc kubenswrapper[4941]: I1130 09:16:09.970077 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae"} err="failed to get container status \"b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae\": rpc error: code = NotFound desc = could not find container \"b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae\": container with ID starting with b901cce7edb54578eeab97f88bcf5217d36301ce9beeac0e9a00fb65cbb665ae not found: ID does not exist" Nov 30 09:16:10 crc kubenswrapper[4941]: I1130 09:16:10.014048 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79kws\" (UniqueName: \"kubernetes.io/projected/1e7d4833-4d03-4e88-af2b-dbba79cf7cd0-kube-api-access-79kws\") pod \"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0\" (UID: \"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0\") " Nov 30 09:16:10 crc kubenswrapper[4941]: I1130 09:16:10.014748 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mariadb-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\") pod \"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0\" (UID: \"1e7d4833-4d03-4e88-af2b-dbba79cf7cd0\") " Nov 30 09:16:10 crc kubenswrapper[4941]: I1130 09:16:10.021066 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e7d4833-4d03-4e88-af2b-dbba79cf7cd0-kube-api-access-79kws" (OuterVolumeSpecName: "kube-api-access-79kws") pod "1e7d4833-4d03-4e88-af2b-dbba79cf7cd0" (UID: "1e7d4833-4d03-4e88-af2b-dbba79cf7cd0"). InnerVolumeSpecName "kube-api-access-79kws". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:16:10 crc kubenswrapper[4941]: I1130 09:16:10.031809 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b" (OuterVolumeSpecName: "mariadb-data") pod "1e7d4833-4d03-4e88-af2b-dbba79cf7cd0" (UID: "1e7d4833-4d03-4e88-af2b-dbba79cf7cd0"). InnerVolumeSpecName "pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 30 09:16:10 crc kubenswrapper[4941]: I1130 09:16:10.117661 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79kws\" (UniqueName: \"kubernetes.io/projected/1e7d4833-4d03-4e88-af2b-dbba79cf7cd0-kube-api-access-79kws\") on node \"crc\" DevicePath \"\"" Nov 30 09:16:10 crc kubenswrapper[4941]: I1130 09:16:10.117751 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\") on node \"crc\" " Nov 30 09:16:10 crc kubenswrapper[4941]: I1130 09:16:10.148713 4941 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Nov 30 09:16:10 crc kubenswrapper[4941]: I1130 09:16:10.149036 4941 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b") on node "crc" Nov 30 09:16:10 crc kubenswrapper[4941]: I1130 09:16:10.219756 4941 reconciler_common.go:293] "Volume detached for volume \"pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb22719-e1e8-4b1a-a929-db1d42bb3e9b\") on node \"crc\" DevicePath \"\"" Nov 30 09:16:10 crc kubenswrapper[4941]: I1130 09:16:10.275520 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Nov 30 09:16:10 crc kubenswrapper[4941]: I1130 09:16:10.289510 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-copy-data"] Nov 30 09:16:11 crc kubenswrapper[4941]: I1130 09:16:11.077806 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Nov 30 09:16:11 crc kubenswrapper[4941]: I1130 09:16:11.078539 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-copy-data" podUID="2940abf6-24c7-499a-8c68-9d4595b42f80" containerName="adoption" containerID="cri-o://c4f1b6f58925c904de5aeeb3fa736e3fb718b9093b5b269f8bc3190cb23246e2" gracePeriod=30 Nov 30 09:16:11 crc kubenswrapper[4941]: I1130 09:16:11.544317 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e7d4833-4d03-4e88-af2b-dbba79cf7cd0" path="/var/lib/kubelet/pods/1e7d4833-4d03-4e88-af2b-dbba79cf7cd0/volumes" Nov 30 09:16:18 crc kubenswrapper[4941]: I1130 09:16:18.522799 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:16:18 crc kubenswrapper[4941]: E1130 09:16:18.523788 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:16:33 crc kubenswrapper[4941]: I1130 09:16:33.523056 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:16:33 crc kubenswrapper[4941]: E1130 09:16:33.524039 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.361365 4941 generic.go:334] "Generic (PLEG): container finished" podID="2940abf6-24c7-499a-8c68-9d4595b42f80" containerID="c4f1b6f58925c904de5aeeb3fa736e3fb718b9093b5b269f8bc3190cb23246e2" exitCode=137 Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.361508 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"2940abf6-24c7-499a-8c68-9d4595b42f80","Type":"ContainerDied","Data":"c4f1b6f58925c904de5aeeb3fa736e3fb718b9093b5b269f8bc3190cb23246e2"} Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.627525 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data" Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.774111 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/2940abf6-24c7-499a-8c68-9d4595b42f80-ovn-data-cert\") pod \"2940abf6-24c7-499a-8c68-9d4595b42f80\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") " Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.774712 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9cff42f1-df09-4855-982a-b703169198fc\") pod \"2940abf6-24c7-499a-8c68-9d4595b42f80\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") " Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.774878 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7m82v\" (UniqueName: \"kubernetes.io/projected/2940abf6-24c7-499a-8c68-9d4595b42f80-kube-api-access-7m82v\") pod \"2940abf6-24c7-499a-8c68-9d4595b42f80\" (UID: \"2940abf6-24c7-499a-8c68-9d4595b42f80\") " Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.782173 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2940abf6-24c7-499a-8c68-9d4595b42f80-kube-api-access-7m82v" (OuterVolumeSpecName: "kube-api-access-7m82v") pod "2940abf6-24c7-499a-8c68-9d4595b42f80" (UID: "2940abf6-24c7-499a-8c68-9d4595b42f80"). InnerVolumeSpecName "kube-api-access-7m82v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.782165 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2940abf6-24c7-499a-8c68-9d4595b42f80-ovn-data-cert" (OuterVolumeSpecName: "ovn-data-cert") pod "2940abf6-24c7-499a-8c68-9d4595b42f80" (UID: "2940abf6-24c7-499a-8c68-9d4595b42f80"). InnerVolumeSpecName "ovn-data-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.794823 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9cff42f1-df09-4855-982a-b703169198fc" (OuterVolumeSpecName: "ovn-data") pod "2940abf6-24c7-499a-8c68-9d4595b42f80" (UID: "2940abf6-24c7-499a-8c68-9d4595b42f80"). InnerVolumeSpecName "pvc-9cff42f1-df09-4855-982a-b703169198fc". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.878035 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7m82v\" (UniqueName: \"kubernetes.io/projected/2940abf6-24c7-499a-8c68-9d4595b42f80-kube-api-access-7m82v\") on node \"crc\" DevicePath \"\"" Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.878083 4941 reconciler_common.go:293] "Volume detached for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/2940abf6-24c7-499a-8c68-9d4595b42f80-ovn-data-cert\") on node \"crc\" DevicePath \"\"" Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.878126 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-9cff42f1-df09-4855-982a-b703169198fc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9cff42f1-df09-4855-982a-b703169198fc\") on node \"crc\" " Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.906991 4941 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.907705 4941 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-9cff42f1-df09-4855-982a-b703169198fc" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9cff42f1-df09-4855-982a-b703169198fc") on node "crc" Nov 30 09:16:41 crc kubenswrapper[4941]: I1130 09:16:41.980856 4941 reconciler_common.go:293] "Volume detached for volume \"pvc-9cff42f1-df09-4855-982a-b703169198fc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9cff42f1-df09-4855-982a-b703169198fc\") on node \"crc\" DevicePath \"\"" Nov 30 09:16:42 crc kubenswrapper[4941]: I1130 09:16:42.377237 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"2940abf6-24c7-499a-8c68-9d4595b42f80","Type":"ContainerDied","Data":"8f436723875bd88b34c561c1279e9da1ec7536a89f89fccdcd6c11d23c73bdda"} Nov 30 09:16:42 crc kubenswrapper[4941]: I1130 09:16:42.377317 4941 scope.go:117] "RemoveContainer" containerID="c4f1b6f58925c904de5aeeb3fa736e3fb718b9093b5b269f8bc3190cb23246e2" Nov 30 09:16:42 crc kubenswrapper[4941]: I1130 09:16:42.377549 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Nov 30 09:16:42 crc kubenswrapper[4941]: I1130 09:16:42.436753 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Nov 30 09:16:42 crc kubenswrapper[4941]: I1130 09:16:42.449187 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-copy-data"] Nov 30 09:16:43 crc kubenswrapper[4941]: I1130 09:16:43.536973 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2940abf6-24c7-499a-8c68-9d4595b42f80" path="/var/lib/kubelet/pods/2940abf6-24c7-499a-8c68-9d4595b42f80/volumes" Nov 30 09:16:46 crc kubenswrapper[4941]: I1130 09:16:46.523172 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:16:46 crc kubenswrapper[4941]: E1130 09:16:46.525806 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:16:58 crc kubenswrapper[4941]: I1130 09:16:58.522510 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:16:58 crc kubenswrapper[4941]: E1130 09:16:58.523289 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.619749 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Nov 30 09:17:02 crc kubenswrapper[4941]: E1130 09:17:02.620931 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2940abf6-24c7-499a-8c68-9d4595b42f80" containerName="adoption" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.620948 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="2940abf6-24c7-499a-8c68-9d4595b42f80" containerName="adoption" Nov 30 09:17:02 crc kubenswrapper[4941]: E1130 09:17:02.620963 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74a3cab8-4459-4346-9bc5-6e71fd726b3e" containerName="extract-utilities" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.620970 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="74a3cab8-4459-4346-9bc5-6e71fd726b3e" containerName="extract-utilities" Nov 30 09:17:02 crc kubenswrapper[4941]: E1130 09:17:02.620984 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74a3cab8-4459-4346-9bc5-6e71fd726b3e" containerName="registry-server" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.620992 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="74a3cab8-4459-4346-9bc5-6e71fd726b3e" containerName="registry-server" Nov 30 09:17:02 crc kubenswrapper[4941]: E1130 09:17:02.621001 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74a3cab8-4459-4346-9bc5-6e71fd726b3e" containerName="extract-content" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.621007 4941 
state_mem.go:107] "Deleted CPUSet assignment" podUID="74a3cab8-4459-4346-9bc5-6e71fd726b3e" containerName="extract-content" Nov 30 09:17:02 crc kubenswrapper[4941]: E1130 09:17:02.621016 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e7d4833-4d03-4e88-af2b-dbba79cf7cd0" containerName="adoption" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.621021 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e7d4833-4d03-4e88-af2b-dbba79cf7cd0" containerName="adoption" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.621246 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="2940abf6-24c7-499a-8c68-9d4595b42f80" containerName="adoption" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.621269 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="74a3cab8-4459-4346-9bc5-6e71fd726b3e" containerName="registry-server" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.621288 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e7d4833-4d03-4e88-af2b-dbba79cf7cd0" containerName="adoption" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.622250 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.627097 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-59s4s" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.627120 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.627191 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.627421 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.638983 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.744988 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.745409 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.745455 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8501edc2-e733-4e75-9afd-ecefc4f74de2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.745552 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.745580 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8501edc2-e733-4e75-9afd-ecefc4f74de2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.745607 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.745840 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45glb\" (UniqueName: \"kubernetes.io/projected/8501edc2-e733-4e75-9afd-ecefc4f74de2-kube-api-access-45glb\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.745885 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8501edc2-e733-4e75-9afd-ecefc4f74de2-config-data\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.745934 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8501edc2-e733-4e75-9afd-ecefc4f74de2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.848430 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45glb\" (UniqueName: \"kubernetes.io/projected/8501edc2-e733-4e75-9afd-ecefc4f74de2-kube-api-access-45glb\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.848499 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8501edc2-e733-4e75-9afd-ecefc4f74de2-config-data\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.848539 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8501edc2-e733-4e75-9afd-ecefc4f74de2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.848586 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.848630 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.848656 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8501edc2-e733-4e75-9afd-ecefc4f74de2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.848699 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.848721 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8501edc2-e733-4e75-9afd-ecefc4f74de2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.848742 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.849492 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8501edc2-e733-4e75-9afd-ecefc4f74de2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.849592 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.849836 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8501edc2-e733-4e75-9afd-ecefc4f74de2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.850535 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8501edc2-e733-4e75-9afd-ecefc4f74de2-config-data\") pod 
\"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.850789 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8501edc2-e733-4e75-9afd-ecefc4f74de2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.855463 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.856001 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.866477 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45glb\" (UniqueName: \"kubernetes.io/projected/8501edc2-e733-4e75-9afd-ecefc4f74de2-kube-api-access-45glb\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.867918 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.890648 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") " pod="openstack/tempest-tests-tempest" Nov 30 09:17:02 crc kubenswrapper[4941]: I1130 09:17:02.957045 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Nov 30 09:17:03 crc kubenswrapper[4941]: I1130 09:17:03.457700 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 09:17:03 crc kubenswrapper[4941]: I1130 09:17:03.459299 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Nov 30 09:17:03 crc kubenswrapper[4941]: I1130 09:17:03.648695 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"8501edc2-e733-4e75-9afd-ecefc4f74de2","Type":"ContainerStarted","Data":"540106f0d7985720d08eb269bb207cc017c0b727e28f0c1e059ef2b799bbccc8"} Nov 30 09:17:11 crc kubenswrapper[4941]: I1130 09:17:11.522630 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:17:11 crc kubenswrapper[4941]: E1130 09:17:11.523906 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:17:24 crc kubenswrapper[4941]: I1130 09:17:24.522245 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:17:24 crc kubenswrapper[4941]: E1130 09:17:24.523077 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:17:39 crc kubenswrapper[4941]: I1130 09:17:39.530914 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:17:39 crc kubenswrapper[4941]: E1130 09:17:39.531789 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:17:54 crc kubenswrapper[4941]: I1130 09:17:54.522261 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:17:54 crc kubenswrapper[4941]: E1130 09:17:54.523220 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:17:58 crc kubenswrapper[4941]: E1130 09:17:58.839313 4941 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: 
context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:fa2bb8efef6782c26ea7f1675eeb36dd" Nov 30 09:17:58 crc kubenswrapper[4941]: E1130 09:17:58.840583 4941 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:fa2bb8efef6782c26ea7f1675eeb36dd" Nov 30 09:17:58 crc kubenswrapper[4941]: E1130 09:17:58.840858 4941 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:fa2bb8efef6782c26ea7f1675eeb36dd,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-45glb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},Rest
artPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(8501edc2-e733-4e75-9afd-ecefc4f74de2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 30 09:17:58 crc kubenswrapper[4941]: E1130 09:17:58.842145 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="8501edc2-e733-4e75-9afd-ecefc4f74de2" Nov 30 09:17:59 crc kubenswrapper[4941]: E1130 09:17:59.394293 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:fa2bb8efef6782c26ea7f1675eeb36dd\\\"\"" pod="openstack/tempest-tests-tempest" podUID="8501edc2-e733-4e75-9afd-ecefc4f74de2" Nov 30 09:18:08 crc kubenswrapper[4941]: I1130 09:18:08.522256 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:18:08 crc kubenswrapper[4941]: E1130 09:18:08.523393 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:18:12 crc kubenswrapper[4941]: I1130 09:18:12.756413 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Nov 30 09:18:14 crc kubenswrapper[4941]: I1130 09:18:14.582670 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"8501edc2-e733-4e75-9afd-ecefc4f74de2","Type":"ContainerStarted","Data":"a16952e422a63e634037765ddffbfdb715e56abde8c12cef9431bdd26ccf275f"} Nov 30 09:18:14 crc kubenswrapper[4941]: I1130 09:18:14.629590 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.333601492 podStartE2EDuration="1m13.629547231s" podCreationTimestamp="2025-11-30 09:17:01 +0000 UTC" firstStartedPulling="2025-11-30 09:17:03.457395547 +0000 UTC m=+9044.225567156" lastFinishedPulling="2025-11-30 09:18:12.753341286 +0000 UTC m=+9113.521512895" observedRunningTime="2025-11-30 09:18:14.616684873 +0000 UTC m=+9115.384856562" watchObservedRunningTime="2025-11-30 09:18:14.629547231 +0000 UTC m=+9115.397718880" Nov 30 09:18:22 crc kubenswrapper[4941]: I1130 09:18:22.522463 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:18:22 crc kubenswrapper[4941]: E1130 09:18:22.524021 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:18:34 crc kubenswrapper[4941]: I1130 09:18:34.522244 4941 
scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:18:34 crc kubenswrapper[4941]: E1130 09:18:34.523384 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:18:45 crc kubenswrapper[4941]: I1130 09:18:45.523027 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:18:45 crc kubenswrapper[4941]: E1130 09:18:45.523920 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:18:58 crc kubenswrapper[4941]: I1130 09:18:58.522532 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:18:58 crc kubenswrapper[4941]: E1130 09:18:58.523460 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:19:09 crc kubenswrapper[4941]: I1130 09:19:09.535542 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:19:09 crc kubenswrapper[4941]: E1130 09:19:09.536684 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:19:24 crc kubenswrapper[4941]: I1130 09:19:24.522609 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:19:24 crc kubenswrapper[4941]: E1130 09:19:24.523960 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:19:38 crc kubenswrapper[4941]: I1130 09:19:38.523122 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce" Nov 30 09:19:39 crc kubenswrapper[4941]: I1130 09:19:39.602660 4941 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"fd03ca3f2fb180c206b843dd0d38c762f5d17c6bb7f0ce3326052857e4af3e3a"} Nov 30 09:20:43 crc kubenswrapper[4941]: I1130 09:20:43.959191 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5p6ss"] Nov 30 09:20:43 crc kubenswrapper[4941]: I1130 09:20:43.963272 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:43 crc kubenswrapper[4941]: I1130 09:20:43.978535 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5p6ss"] Nov 30 09:20:44 crc kubenswrapper[4941]: I1130 09:20:44.089676 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f52b66d-77a1-4cb6-a482-a4d937af8969-catalog-content\") pod \"certified-operators-5p6ss\" (UID: \"1f52b66d-77a1-4cb6-a482-a4d937af8969\") " pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:44 crc kubenswrapper[4941]: I1130 09:20:44.090130 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f52b66d-77a1-4cb6-a482-a4d937af8969-utilities\") pod \"certified-operators-5p6ss\" (UID: \"1f52b66d-77a1-4cb6-a482-a4d937af8969\") " pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:44 crc kubenswrapper[4941]: I1130 09:20:44.090166 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kz4bf\" (UniqueName: \"kubernetes.io/projected/1f52b66d-77a1-4cb6-a482-a4d937af8969-kube-api-access-kz4bf\") pod \"certified-operators-5p6ss\" (UID: \"1f52b66d-77a1-4cb6-a482-a4d937af8969\") " pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:44 crc kubenswrapper[4941]: I1130 09:20:44.192447 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f52b66d-77a1-4cb6-a482-a4d937af8969-catalog-content\") pod \"certified-operators-5p6ss\" (UID: \"1f52b66d-77a1-4cb6-a482-a4d937af8969\") " pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:44 crc kubenswrapper[4941]: I1130 09:20:44.192582 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f52b66d-77a1-4cb6-a482-a4d937af8969-utilities\") pod \"certified-operators-5p6ss\" (UID: \"1f52b66d-77a1-4cb6-a482-a4d937af8969\") " pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:44 crc kubenswrapper[4941]: I1130 09:20:44.192612 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kz4bf\" (UniqueName: \"kubernetes.io/projected/1f52b66d-77a1-4cb6-a482-a4d937af8969-kube-api-access-kz4bf\") pod \"certified-operators-5p6ss\" (UID: \"1f52b66d-77a1-4cb6-a482-a4d937af8969\") " pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:44 crc kubenswrapper[4941]: I1130 09:20:44.193158 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f52b66d-77a1-4cb6-a482-a4d937af8969-utilities\") pod \"certified-operators-5p6ss\" (UID: 
\"1f52b66d-77a1-4cb6-a482-a4d937af8969\") " pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:44 crc kubenswrapper[4941]: I1130 09:20:44.193230 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f52b66d-77a1-4cb6-a482-a4d937af8969-catalog-content\") pod \"certified-operators-5p6ss\" (UID: \"1f52b66d-77a1-4cb6-a482-a4d937af8969\") " pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:44 crc kubenswrapper[4941]: I1130 09:20:44.229448 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kz4bf\" (UniqueName: \"kubernetes.io/projected/1f52b66d-77a1-4cb6-a482-a4d937af8969-kube-api-access-kz4bf\") pod \"certified-operators-5p6ss\" (UID: \"1f52b66d-77a1-4cb6-a482-a4d937af8969\") " pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:44 crc kubenswrapper[4941]: I1130 09:20:44.284957 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:44 crc kubenswrapper[4941]: I1130 09:20:44.936234 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5p6ss"] Nov 30 09:20:45 crc kubenswrapper[4941]: I1130 09:20:45.391315 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5p6ss" event={"ID":"1f52b66d-77a1-4cb6-a482-a4d937af8969","Type":"ContainerStarted","Data":"84526ee9550cf7f6df24ac2731e52d8bf0ebb40ef8eab4db7bff32d85b75debb"} Nov 30 09:20:46 crc kubenswrapper[4941]: I1130 09:20:46.401094 4941 generic.go:334] "Generic (PLEG): container finished" podID="1f52b66d-77a1-4cb6-a482-a4d937af8969" containerID="267a23b0c33cb2682cb759d2317688ae166bee344ea60a4a5108f620a033acd7" exitCode=0 Nov 30 09:20:46 crc kubenswrapper[4941]: I1130 09:20:46.401201 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5p6ss" event={"ID":"1f52b66d-77a1-4cb6-a482-a4d937af8969","Type":"ContainerDied","Data":"267a23b0c33cb2682cb759d2317688ae166bee344ea60a4a5108f620a033acd7"} Nov 30 09:20:47 crc kubenswrapper[4941]: I1130 09:20:47.415498 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5p6ss" event={"ID":"1f52b66d-77a1-4cb6-a482-a4d937af8969","Type":"ContainerStarted","Data":"331c74f84319431bef1946ffe76db517461541f8d22771e9e930f3b7b3639fbf"} Nov 30 09:20:48 crc kubenswrapper[4941]: I1130 09:20:48.426862 4941 generic.go:334] "Generic (PLEG): container finished" podID="1f52b66d-77a1-4cb6-a482-a4d937af8969" containerID="331c74f84319431bef1946ffe76db517461541f8d22771e9e930f3b7b3639fbf" exitCode=0 Nov 30 09:20:48 crc kubenswrapper[4941]: I1130 09:20:48.426972 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5p6ss" event={"ID":"1f52b66d-77a1-4cb6-a482-a4d937af8969","Type":"ContainerDied","Data":"331c74f84319431bef1946ffe76db517461541f8d22771e9e930f3b7b3639fbf"} Nov 30 09:20:49 crc kubenswrapper[4941]: I1130 09:20:49.440415 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5p6ss" event={"ID":"1f52b66d-77a1-4cb6-a482-a4d937af8969","Type":"ContainerStarted","Data":"678cf9243b9b834f3810006d0040b30dbfe9d34dd9d022272254a4daf5c5619d"} Nov 30 09:20:49 crc kubenswrapper[4941]: I1130 09:20:49.464633 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-5p6ss" podStartSLOduration=3.978785562 podStartE2EDuration="6.464609024s" podCreationTimestamp="2025-11-30 09:20:43 +0000 UTC" firstStartedPulling="2025-11-30 09:20:46.402981082 +0000 UTC m=+9267.171152701" lastFinishedPulling="2025-11-30 09:20:48.888804544 +0000 UTC m=+9269.656976163" observedRunningTime="2025-11-30 09:20:49.456621807 +0000 UTC m=+9270.224793406" watchObservedRunningTime="2025-11-30 09:20:49.464609024 +0000 UTC m=+9270.232780633" Nov 30 09:20:54 crc kubenswrapper[4941]: I1130 09:20:54.286065 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:54 crc kubenswrapper[4941]: I1130 09:20:54.286641 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:54 crc kubenswrapper[4941]: I1130 09:20:54.401105 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:54 crc kubenswrapper[4941]: I1130 09:20:54.546838 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:54 crc kubenswrapper[4941]: I1130 09:20:54.643471 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5p6ss"] Nov 30 09:20:56 crc kubenswrapper[4941]: I1130 09:20:56.511857 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5p6ss" podUID="1f52b66d-77a1-4cb6-a482-a4d937af8969" containerName="registry-server" containerID="cri-o://678cf9243b9b834f3810006d0040b30dbfe9d34dd9d022272254a4daf5c5619d" gracePeriod=2 Nov 30 09:20:57 crc kubenswrapper[4941]: I1130 09:20:57.535473 4941 generic.go:334] "Generic (PLEG): container finished" podID="1f52b66d-77a1-4cb6-a482-a4d937af8969" containerID="678cf9243b9b834f3810006d0040b30dbfe9d34dd9d022272254a4daf5c5619d" exitCode=0 Nov 30 09:20:57 crc kubenswrapper[4941]: I1130 09:20:57.535827 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5p6ss" event={"ID":"1f52b66d-77a1-4cb6-a482-a4d937af8969","Type":"ContainerDied","Data":"678cf9243b9b834f3810006d0040b30dbfe9d34dd9d022272254a4daf5c5619d"} Nov 30 09:20:57 crc kubenswrapper[4941]: I1130 09:20:57.746009 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:57 crc kubenswrapper[4941]: I1130 09:20:57.806537 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kz4bf\" (UniqueName: \"kubernetes.io/projected/1f52b66d-77a1-4cb6-a482-a4d937af8969-kube-api-access-kz4bf\") pod \"1f52b66d-77a1-4cb6-a482-a4d937af8969\" (UID: \"1f52b66d-77a1-4cb6-a482-a4d937af8969\") " Nov 30 09:20:57 crc kubenswrapper[4941]: I1130 09:20:57.806655 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f52b66d-77a1-4cb6-a482-a4d937af8969-catalog-content\") pod \"1f52b66d-77a1-4cb6-a482-a4d937af8969\" (UID: \"1f52b66d-77a1-4cb6-a482-a4d937af8969\") " Nov 30 09:20:57 crc kubenswrapper[4941]: I1130 09:20:57.806856 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f52b66d-77a1-4cb6-a482-a4d937af8969-utilities\") pod \"1f52b66d-77a1-4cb6-a482-a4d937af8969\" (UID: \"1f52b66d-77a1-4cb6-a482-a4d937af8969\") " Nov 30 09:20:57 crc kubenswrapper[4941]: I1130 09:20:57.808142 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f52b66d-77a1-4cb6-a482-a4d937af8969-utilities" (OuterVolumeSpecName: "utilities") pod "1f52b66d-77a1-4cb6-a482-a4d937af8969" (UID: "1f52b66d-77a1-4cb6-a482-a4d937af8969"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:20:57 crc kubenswrapper[4941]: I1130 09:20:57.816256 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f52b66d-77a1-4cb6-a482-a4d937af8969-kube-api-access-kz4bf" (OuterVolumeSpecName: "kube-api-access-kz4bf") pod "1f52b66d-77a1-4cb6-a482-a4d937af8969" (UID: "1f52b66d-77a1-4cb6-a482-a4d937af8969"). InnerVolumeSpecName "kube-api-access-kz4bf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:20:57 crc kubenswrapper[4941]: I1130 09:20:57.873569 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f52b66d-77a1-4cb6-a482-a4d937af8969-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1f52b66d-77a1-4cb6-a482-a4d937af8969" (UID: "1f52b66d-77a1-4cb6-a482-a4d937af8969"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:20:57 crc kubenswrapper[4941]: I1130 09:20:57.909027 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kz4bf\" (UniqueName: \"kubernetes.io/projected/1f52b66d-77a1-4cb6-a482-a4d937af8969-kube-api-access-kz4bf\") on node \"crc\" DevicePath \"\"" Nov 30 09:20:57 crc kubenswrapper[4941]: I1130 09:20:57.909071 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f52b66d-77a1-4cb6-a482-a4d937af8969-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 09:20:57 crc kubenswrapper[4941]: I1130 09:20:57.909081 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f52b66d-77a1-4cb6-a482-a4d937af8969-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 09:20:58 crc kubenswrapper[4941]: I1130 09:20:58.548610 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5p6ss" event={"ID":"1f52b66d-77a1-4cb6-a482-a4d937af8969","Type":"ContainerDied","Data":"84526ee9550cf7f6df24ac2731e52d8bf0ebb40ef8eab4db7bff32d85b75debb"} Nov 30 09:20:58 crc kubenswrapper[4941]: I1130 09:20:58.548694 4941 scope.go:117] "RemoveContainer" containerID="678cf9243b9b834f3810006d0040b30dbfe9d34dd9d022272254a4daf5c5619d" Nov 30 09:20:58 crc kubenswrapper[4941]: I1130 09:20:58.548881 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5p6ss" Nov 30 09:20:58 crc kubenswrapper[4941]: I1130 09:20:58.595541 4941 scope.go:117] "RemoveContainer" containerID="331c74f84319431bef1946ffe76db517461541f8d22771e9e930f3b7b3639fbf" Nov 30 09:20:58 crc kubenswrapper[4941]: I1130 09:20:58.604514 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5p6ss"] Nov 30 09:20:58 crc kubenswrapper[4941]: I1130 09:20:58.617611 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5p6ss"] Nov 30 09:20:58 crc kubenswrapper[4941]: I1130 09:20:58.643773 4941 scope.go:117] "RemoveContainer" containerID="267a23b0c33cb2682cb759d2317688ae166bee344ea60a4a5108f620a033acd7" Nov 30 09:20:59 crc kubenswrapper[4941]: I1130 09:20:59.535182 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f52b66d-77a1-4cb6-a482-a4d937af8969" path="/var/lib/kubelet/pods/1f52b66d-77a1-4cb6-a482-a4d937af8969/volumes" Nov 30 09:21:17 crc kubenswrapper[4941]: I1130 09:21:17.924559 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g96qr"] Nov 30 09:21:17 crc kubenswrapper[4941]: E1130 09:21:17.926089 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f52b66d-77a1-4cb6-a482-a4d937af8969" containerName="registry-server" Nov 30 09:21:17 crc kubenswrapper[4941]: I1130 09:21:17.926109 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f52b66d-77a1-4cb6-a482-a4d937af8969" containerName="registry-server" Nov 30 09:21:17 crc kubenswrapper[4941]: E1130 09:21:17.926129 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f52b66d-77a1-4cb6-a482-a4d937af8969" containerName="extract-content" Nov 30 09:21:17 crc kubenswrapper[4941]: I1130 09:21:17.926135 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f52b66d-77a1-4cb6-a482-a4d937af8969" containerName="extract-content" Nov 30 09:21:17 crc kubenswrapper[4941]: E1130 09:21:17.926148 4941 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f52b66d-77a1-4cb6-a482-a4d937af8969" containerName="extract-utilities" Nov 30 09:21:17 crc kubenswrapper[4941]: I1130 09:21:17.926156 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f52b66d-77a1-4cb6-a482-a4d937af8969" containerName="extract-utilities" Nov 30 09:21:17 crc kubenswrapper[4941]: I1130 09:21:17.926418 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f52b66d-77a1-4cb6-a482-a4d937af8969" containerName="registry-server" Nov 30 09:21:17 crc kubenswrapper[4941]: I1130 09:21:17.937206 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g96qr"] Nov 30 09:21:17 crc kubenswrapper[4941]: I1130 09:21:17.937851 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:17 crc kubenswrapper[4941]: I1130 09:21:17.993507 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8216140-7b27-475f-8b09-2614997b3c31-catalog-content\") pod \"redhat-marketplace-g96qr\" (UID: \"a8216140-7b27-475f-8b09-2614997b3c31\") " pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:17 crc kubenswrapper[4941]: I1130 09:21:17.993835 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8216140-7b27-475f-8b09-2614997b3c31-utilities\") pod \"redhat-marketplace-g96qr\" (UID: \"a8216140-7b27-475f-8b09-2614997b3c31\") " pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:17 crc kubenswrapper[4941]: I1130 09:21:17.993882 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snpjb\" (UniqueName: \"kubernetes.io/projected/a8216140-7b27-475f-8b09-2614997b3c31-kube-api-access-snpjb\") pod \"redhat-marketplace-g96qr\" (UID: \"a8216140-7b27-475f-8b09-2614997b3c31\") " pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:18 crc kubenswrapper[4941]: I1130 09:21:18.096034 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snpjb\" (UniqueName: \"kubernetes.io/projected/a8216140-7b27-475f-8b09-2614997b3c31-kube-api-access-snpjb\") pod \"redhat-marketplace-g96qr\" (UID: \"a8216140-7b27-475f-8b09-2614997b3c31\") " pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:18 crc kubenswrapper[4941]: I1130 09:21:18.096223 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8216140-7b27-475f-8b09-2614997b3c31-catalog-content\") pod \"redhat-marketplace-g96qr\" (UID: \"a8216140-7b27-475f-8b09-2614997b3c31\") " pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:18 crc kubenswrapper[4941]: I1130 09:21:18.096286 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8216140-7b27-475f-8b09-2614997b3c31-utilities\") pod \"redhat-marketplace-g96qr\" (UID: \"a8216140-7b27-475f-8b09-2614997b3c31\") " pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:18 crc kubenswrapper[4941]: I1130 09:21:18.096826 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/a8216140-7b27-475f-8b09-2614997b3c31-catalog-content\") pod \"redhat-marketplace-g96qr\" (UID: \"a8216140-7b27-475f-8b09-2614997b3c31\") " pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:18 crc kubenswrapper[4941]: I1130 09:21:18.096877 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8216140-7b27-475f-8b09-2614997b3c31-utilities\") pod \"redhat-marketplace-g96qr\" (UID: \"a8216140-7b27-475f-8b09-2614997b3c31\") " pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:18 crc kubenswrapper[4941]: I1130 09:21:18.117167 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snpjb\" (UniqueName: \"kubernetes.io/projected/a8216140-7b27-475f-8b09-2614997b3c31-kube-api-access-snpjb\") pod \"redhat-marketplace-g96qr\" (UID: \"a8216140-7b27-475f-8b09-2614997b3c31\") " pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:18 crc kubenswrapper[4941]: I1130 09:21:18.271016 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:18 crc kubenswrapper[4941]: I1130 09:21:18.877926 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g96qr"] Nov 30 09:21:19 crc kubenswrapper[4941]: I1130 09:21:19.811659 4941 generic.go:334] "Generic (PLEG): container finished" podID="a8216140-7b27-475f-8b09-2614997b3c31" containerID="ca6014729e05d3f1229461e13659869cb46bd54bfc5544d26936a0957f9ebace" exitCode=0 Nov 30 09:21:19 crc kubenswrapper[4941]: I1130 09:21:19.812153 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g96qr" event={"ID":"a8216140-7b27-475f-8b09-2614997b3c31","Type":"ContainerDied","Data":"ca6014729e05d3f1229461e13659869cb46bd54bfc5544d26936a0957f9ebace"} Nov 30 09:21:19 crc kubenswrapper[4941]: I1130 09:21:19.812182 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g96qr" event={"ID":"a8216140-7b27-475f-8b09-2614997b3c31","Type":"ContainerStarted","Data":"8b1d7f2e969917c66c084b028e843d671d3d680fcfe89d03040270e35c7558bd"} Nov 30 09:21:21 crc kubenswrapper[4941]: I1130 09:21:21.835322 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g96qr" event={"ID":"a8216140-7b27-475f-8b09-2614997b3c31","Type":"ContainerStarted","Data":"d54a44861536bfb1dfad4e317071f969606a38ea90c59546672070469a3cae2e"} Nov 30 09:21:22 crc kubenswrapper[4941]: I1130 09:21:22.863733 4941 generic.go:334] "Generic (PLEG): container finished" podID="a8216140-7b27-475f-8b09-2614997b3c31" containerID="d54a44861536bfb1dfad4e317071f969606a38ea90c59546672070469a3cae2e" exitCode=0 Nov 30 09:21:22 crc kubenswrapper[4941]: I1130 09:21:22.864142 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g96qr" event={"ID":"a8216140-7b27-475f-8b09-2614997b3c31","Type":"ContainerDied","Data":"d54a44861536bfb1dfad4e317071f969606a38ea90c59546672070469a3cae2e"} Nov 30 09:21:23 crc kubenswrapper[4941]: I1130 09:21:23.879533 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g96qr" event={"ID":"a8216140-7b27-475f-8b09-2614997b3c31","Type":"ContainerStarted","Data":"9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4"} Nov 30 09:21:23 crc kubenswrapper[4941]: I1130 09:21:23.908169 4941 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g96qr" podStartSLOduration=3.053403703 podStartE2EDuration="6.908149184s" podCreationTimestamp="2025-11-30 09:21:17 +0000 UTC" firstStartedPulling="2025-11-30 09:21:19.814567278 +0000 UTC m=+9300.582738887" lastFinishedPulling="2025-11-30 09:21:23.669312759 +0000 UTC m=+9304.437484368" observedRunningTime="2025-11-30 09:21:23.897210396 +0000 UTC m=+9304.665382005" watchObservedRunningTime="2025-11-30 09:21:23.908149184 +0000 UTC m=+9304.676320793" Nov 30 09:21:28 crc kubenswrapper[4941]: I1130 09:21:28.271574 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:28 crc kubenswrapper[4941]: I1130 09:21:28.272194 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:28 crc kubenswrapper[4941]: I1130 09:21:28.332031 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:28 crc kubenswrapper[4941]: I1130 09:21:28.992126 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:29 crc kubenswrapper[4941]: I1130 09:21:29.051704 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g96qr"] Nov 30 09:21:30 crc kubenswrapper[4941]: I1130 09:21:30.952717 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g96qr" podUID="a8216140-7b27-475f-8b09-2614997b3c31" containerName="registry-server" containerID="cri-o://9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4" gracePeriod=2 Nov 30 09:21:31 crc kubenswrapper[4941]: I1130 09:21:31.806371 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:31 crc kubenswrapper[4941]: I1130 09:21:31.941958 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8216140-7b27-475f-8b09-2614997b3c31-utilities\") pod \"a8216140-7b27-475f-8b09-2614997b3c31\" (UID: \"a8216140-7b27-475f-8b09-2614997b3c31\") " Nov 30 09:21:31 crc kubenswrapper[4941]: I1130 09:21:31.942376 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snpjb\" (UniqueName: \"kubernetes.io/projected/a8216140-7b27-475f-8b09-2614997b3c31-kube-api-access-snpjb\") pod \"a8216140-7b27-475f-8b09-2614997b3c31\" (UID: \"a8216140-7b27-475f-8b09-2614997b3c31\") " Nov 30 09:21:31 crc kubenswrapper[4941]: I1130 09:21:31.942524 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8216140-7b27-475f-8b09-2614997b3c31-catalog-content\") pod \"a8216140-7b27-475f-8b09-2614997b3c31\" (UID: \"a8216140-7b27-475f-8b09-2614997b3c31\") " Nov 30 09:21:31 crc kubenswrapper[4941]: I1130 09:21:31.942732 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8216140-7b27-475f-8b09-2614997b3c31-utilities" (OuterVolumeSpecName: "utilities") pod "a8216140-7b27-475f-8b09-2614997b3c31" (UID: "a8216140-7b27-475f-8b09-2614997b3c31"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:21:31 crc kubenswrapper[4941]: I1130 09:21:31.943346 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8216140-7b27-475f-8b09-2614997b3c31-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 09:21:31 crc kubenswrapper[4941]: I1130 09:21:31.963841 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8216140-7b27-475f-8b09-2614997b3c31-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8216140-7b27-475f-8b09-2614997b3c31" (UID: "a8216140-7b27-475f-8b09-2614997b3c31"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:21:31 crc kubenswrapper[4941]: I1130 09:21:31.970130 4941 generic.go:334] "Generic (PLEG): container finished" podID="a8216140-7b27-475f-8b09-2614997b3c31" containerID="9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4" exitCode=0 Nov 30 09:21:31 crc kubenswrapper[4941]: I1130 09:21:31.970225 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g96qr" Nov 30 09:21:31 crc kubenswrapper[4941]: I1130 09:21:31.970253 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g96qr" event={"ID":"a8216140-7b27-475f-8b09-2614997b3c31","Type":"ContainerDied","Data":"9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4"} Nov 30 09:21:31 crc kubenswrapper[4941]: I1130 09:21:31.971066 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g96qr" event={"ID":"a8216140-7b27-475f-8b09-2614997b3c31","Type":"ContainerDied","Data":"8b1d7f2e969917c66c084b028e843d671d3d680fcfe89d03040270e35c7558bd"} Nov 30 09:21:31 crc kubenswrapper[4941]: I1130 09:21:31.971116 4941 scope.go:117] "RemoveContainer" containerID="9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4" Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.012398 4941 scope.go:117] "RemoveContainer" containerID="d54a44861536bfb1dfad4e317071f969606a38ea90c59546672070469a3cae2e" Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.045292 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8216140-7b27-475f-8b09-2614997b3c31-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.495686 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8216140-7b27-475f-8b09-2614997b3c31-kube-api-access-snpjb" (OuterVolumeSpecName: "kube-api-access-snpjb") pod "a8216140-7b27-475f-8b09-2614997b3c31" (UID: "a8216140-7b27-475f-8b09-2614997b3c31"). InnerVolumeSpecName "kube-api-access-snpjb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.511722 4941 scope.go:117] "RemoveContainer" containerID="ca6014729e05d3f1229461e13659869cb46bd54bfc5544d26936a0957f9ebace" Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.557069 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snpjb\" (UniqueName: \"kubernetes.io/projected/a8216140-7b27-475f-8b09-2614997b3c31-kube-api-access-snpjb\") on node \"crc\" DevicePath \"\"" Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.618398 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g96qr"] Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.627862 4941 scope.go:117] "RemoveContainer" containerID="9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4" Nov 30 09:21:32 crc kubenswrapper[4941]: E1130 09:21:32.629039 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4\": container with ID starting with 9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4 not found: ID does not exist" containerID="9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4" Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.629082 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4"} err="failed to get container status \"9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4\": rpc error: code = NotFound desc = could not find container \"9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4\": container with ID starting with 9f07ca6334f1c52db61ca5eaca83ea9cf22fa375af3849c14f321a0d6b061cb4 not found: ID does not exist" Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.629107 4941 scope.go:117] "RemoveContainer" containerID="d54a44861536bfb1dfad4e317071f969606a38ea90c59546672070469a3cae2e" Nov 30 09:21:32 crc kubenswrapper[4941]: E1130 09:21:32.629351 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d54a44861536bfb1dfad4e317071f969606a38ea90c59546672070469a3cae2e\": container with ID starting with d54a44861536bfb1dfad4e317071f969606a38ea90c59546672070469a3cae2e not found: ID does not exist" containerID="d54a44861536bfb1dfad4e317071f969606a38ea90c59546672070469a3cae2e" Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.629374 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d54a44861536bfb1dfad4e317071f969606a38ea90c59546672070469a3cae2e"} err="failed to get container status \"d54a44861536bfb1dfad4e317071f969606a38ea90c59546672070469a3cae2e\": rpc error: code = NotFound desc = could not find container \"d54a44861536bfb1dfad4e317071f969606a38ea90c59546672070469a3cae2e\": container with ID starting with d54a44861536bfb1dfad4e317071f969606a38ea90c59546672070469a3cae2e not found: ID does not exist" Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.629387 4941 scope.go:117] "RemoveContainer" containerID="ca6014729e05d3f1229461e13659869cb46bd54bfc5544d26936a0957f9ebace" Nov 30 09:21:32 crc kubenswrapper[4941]: E1130 09:21:32.633450 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"ca6014729e05d3f1229461e13659869cb46bd54bfc5544d26936a0957f9ebace\": container with ID starting with ca6014729e05d3f1229461e13659869cb46bd54bfc5544d26936a0957f9ebace not found: ID does not exist" containerID="ca6014729e05d3f1229461e13659869cb46bd54bfc5544d26936a0957f9ebace" Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.633493 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca6014729e05d3f1229461e13659869cb46bd54bfc5544d26936a0957f9ebace"} err="failed to get container status \"ca6014729e05d3f1229461e13659869cb46bd54bfc5544d26936a0957f9ebace\": rpc error: code = NotFound desc = could not find container \"ca6014729e05d3f1229461e13659869cb46bd54bfc5544d26936a0957f9ebace\": container with ID starting with ca6014729e05d3f1229461e13659869cb46bd54bfc5544d26936a0957f9ebace not found: ID does not exist" Nov 30 09:21:32 crc kubenswrapper[4941]: I1130 09:21:32.636106 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g96qr"] Nov 30 09:21:33 crc kubenswrapper[4941]: I1130 09:21:33.535996 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8216140-7b27-475f-8b09-2614997b3c31" path="/var/lib/kubelet/pods/a8216140-7b27-475f-8b09-2614997b3c31/volumes" Nov 30 09:22:02 crc kubenswrapper[4941]: I1130 09:22:02.979533 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 09:22:02 crc kubenswrapper[4941]: I1130 09:22:02.980115 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 09:22:32 crc kubenswrapper[4941]: I1130 09:22:32.978926 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 09:22:32 crc kubenswrapper[4941]: I1130 09:22:32.979613 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 09:23:02 crc kubenswrapper[4941]: I1130 09:23:02.978352 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 30 09:23:02 crc kubenswrapper[4941]: I1130 09:23:02.978907 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 30 09:23:02 crc 
kubenswrapper[4941]: I1130 09:23:02.978968 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 09:23:02 crc kubenswrapper[4941]: I1130 09:23:02.979998 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fd03ca3f2fb180c206b843dd0d38c762f5d17c6bb7f0ce3326052857e4af3e3a"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 09:23:02 crc kubenswrapper[4941]: I1130 09:23:02.980060 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://fd03ca3f2fb180c206b843dd0d38c762f5d17c6bb7f0ce3326052857e4af3e3a" gracePeriod=600
Nov 30 09:23:03 crc kubenswrapper[4941]: I1130 09:23:03.284921 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="fd03ca3f2fb180c206b843dd0d38c762f5d17c6bb7f0ce3326052857e4af3e3a" exitCode=0
Nov 30 09:23:03 crc kubenswrapper[4941]: I1130 09:23:03.285065 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"fd03ca3f2fb180c206b843dd0d38c762f5d17c6bb7f0ce3326052857e4af3e3a"}
Nov 30 09:23:03 crc kubenswrapper[4941]: I1130 09:23:03.285448 4941 scope.go:117] "RemoveContainer" containerID="03228af2adf2140b6ac3299e7bdf7bfe23e5e14a9e1e4edeaf8803763cfb26ce"
Nov 30 09:23:04 crc kubenswrapper[4941]: I1130 09:23:04.301400 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"}
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.182567 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4ckxv"]
Nov 30 09:25:21 crc kubenswrapper[4941]: E1130 09:25:21.183804 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8216140-7b27-475f-8b09-2614997b3c31" containerName="extract-content"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.183823 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8216140-7b27-475f-8b09-2614997b3c31" containerName="extract-content"
Nov 30 09:25:21 crc kubenswrapper[4941]: E1130 09:25:21.183857 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8216140-7b27-475f-8b09-2614997b3c31" containerName="extract-utilities"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.183866 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8216140-7b27-475f-8b09-2614997b3c31" containerName="extract-utilities"
Nov 30 09:25:21 crc kubenswrapper[4941]: E1130 09:25:21.183874 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8216140-7b27-475f-8b09-2614997b3c31" containerName="registry-server"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.183883 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8216140-7b27-475f-8b09-2614997b3c31" containerName="registry-server"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.184142 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8216140-7b27-475f-8b09-2614997b3c31" containerName="registry-server"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.185993 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.198122 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4ckxv"]
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.361805 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-utilities\") pod \"community-operators-4ckxv\" (UID: \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\") " pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.362819 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-catalog-content\") pod \"community-operators-4ckxv\" (UID: \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\") " pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.363031 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvbqg\" (UniqueName: \"kubernetes.io/projected/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-kube-api-access-qvbqg\") pod \"community-operators-4ckxv\" (UID: \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\") " pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.465647 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-catalog-content\") pod \"community-operators-4ckxv\" (UID: \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\") " pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.465718 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvbqg\" (UniqueName: \"kubernetes.io/projected/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-kube-api-access-qvbqg\") pod \"community-operators-4ckxv\" (UID: \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\") " pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.465800 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-utilities\") pod \"community-operators-4ckxv\" (UID: \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\") " pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.466477 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-utilities\") pod \"community-operators-4ckxv\" (UID: \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\") " pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.466675 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-catalog-content\") pod \"community-operators-4ckxv\" (UID: \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\") " pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.489231 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvbqg\" (UniqueName: \"kubernetes.io/projected/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-kube-api-access-qvbqg\") pod \"community-operators-4ckxv\" (UID: \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\") " pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:21 crc kubenswrapper[4941]: I1130 09:25:21.527868 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:22 crc kubenswrapper[4941]: I1130 09:25:22.094218 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4ckxv"]
Nov 30 09:25:22 crc kubenswrapper[4941]: I1130 09:25:22.858726 4941 generic.go:334] "Generic (PLEG): container finished" podID="e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" containerID="206af3fd3908b13b7d474a45261424e827fe62535805b413ccce18e356267e6f" exitCode=0
Nov 30 09:25:22 crc kubenswrapper[4941]: I1130 09:25:22.859019 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4ckxv" event={"ID":"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8","Type":"ContainerDied","Data":"206af3fd3908b13b7d474a45261424e827fe62535805b413ccce18e356267e6f"}
Nov 30 09:25:22 crc kubenswrapper[4941]: I1130 09:25:22.859054 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4ckxv" event={"ID":"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8","Type":"ContainerStarted","Data":"5cc04354a9b00c49203da9ced8bf4059c556008fad5cc9836db5f5d3dab776d7"}
Nov 30 09:25:22 crc kubenswrapper[4941]: I1130 09:25:22.861787 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 30 09:25:23 crc kubenswrapper[4941]: I1130 09:25:23.874400 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4ckxv" event={"ID":"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8","Type":"ContainerStarted","Data":"27f1ce78198cb21d874f4b631d401996ff96be5d5359dc79ef225870042b11a1"}
Nov 30 09:25:24 crc kubenswrapper[4941]: I1130 09:25:24.886800 4941 generic.go:334] "Generic (PLEG): container finished" podID="e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" containerID="27f1ce78198cb21d874f4b631d401996ff96be5d5359dc79ef225870042b11a1" exitCode=0
Nov 30 09:25:24 crc kubenswrapper[4941]: I1130 09:25:24.887040 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4ckxv" event={"ID":"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8","Type":"ContainerDied","Data":"27f1ce78198cb21d874f4b631d401996ff96be5d5359dc79ef225870042b11a1"}
Nov 30 09:25:25 crc kubenswrapper[4941]: I1130 09:25:25.902262 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4ckxv" event={"ID":"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8","Type":"ContainerStarted","Data":"21a1f9e8c8d9e8fcea24157b23cf7a4315dc2e4e36e58b03a44efeedbc2c143c"}
Nov 30 09:25:25 crc kubenswrapper[4941]: I1130 09:25:25.938746 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4ckxv" podStartSLOduration=2.440811371 podStartE2EDuration="4.938718019s" podCreationTimestamp="2025-11-30 09:25:21 +0000 UTC" firstStartedPulling="2025-11-30 09:25:22.86148845 +0000 UTC m=+9543.629660069" lastFinishedPulling="2025-11-30 09:25:25.359395098 +0000 UTC m=+9546.127566717" observedRunningTime="2025-11-30 09:25:25.929539206 +0000 UTC m=+9546.697710815" watchObservedRunningTime="2025-11-30 09:25:25.938718019 +0000 UTC m=+9546.706889638"
Nov 30 09:25:31 crc kubenswrapper[4941]: I1130 09:25:31.536457 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:31 crc kubenswrapper[4941]: I1130 09:25:31.537113 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:31 crc kubenswrapper[4941]: I1130 09:25:31.579390 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:32 crc kubenswrapper[4941]: I1130 09:25:32.391134 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:32 crc kubenswrapper[4941]: I1130 09:25:32.474120 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4ckxv"]
Nov 30 09:25:32 crc kubenswrapper[4941]: I1130 09:25:32.978636 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 09:25:32 crc kubenswrapper[4941]: I1130 09:25:32.978995 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 09:25:33 crc kubenswrapper[4941]: I1130 09:25:33.986966 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4ckxv" podUID="e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" containerName="registry-server" containerID="cri-o://21a1f9e8c8d9e8fcea24157b23cf7a4315dc2e4e36e58b03a44efeedbc2c143c" gracePeriod=2
Nov 30 09:25:35 crc kubenswrapper[4941]: I1130 09:25:34.999810 4941 generic.go:334] "Generic (PLEG): container finished" podID="e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" containerID="21a1f9e8c8d9e8fcea24157b23cf7a4315dc2e4e36e58b03a44efeedbc2c143c" exitCode=0
Nov 30 09:25:35 crc kubenswrapper[4941]: I1130 09:25:34.999895 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4ckxv" event={"ID":"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8","Type":"ContainerDied","Data":"21a1f9e8c8d9e8fcea24157b23cf7a4315dc2e4e36e58b03a44efeedbc2c143c"}
Nov 30 09:25:35 crc kubenswrapper[4941]: I1130 09:25:35.743222 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:35 crc kubenswrapper[4941]: I1130 09:25:35.828455 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-catalog-content\") pod \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\" (UID: \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\") "
Nov 30 09:25:35 crc kubenswrapper[4941]: I1130 09:25:35.828532 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvbqg\" (UniqueName: \"kubernetes.io/projected/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-kube-api-access-qvbqg\") pod \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\" (UID: \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\") "
Nov 30 09:25:35 crc kubenswrapper[4941]: I1130 09:25:35.828685 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-utilities\") pod \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\" (UID: \"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8\") "
Nov 30 09:25:35 crc kubenswrapper[4941]: I1130 09:25:35.829722 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-utilities" (OuterVolumeSpecName: "utilities") pod "e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" (UID: "e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 09:25:35 crc kubenswrapper[4941]: I1130 09:25:35.837958 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-kube-api-access-qvbqg" (OuterVolumeSpecName: "kube-api-access-qvbqg") pod "e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" (UID: "e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8"). InnerVolumeSpecName "kube-api-access-qvbqg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 09:25:35 crc kubenswrapper[4941]: I1130 09:25:35.881308 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" (UID: "e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 09:25:35 crc kubenswrapper[4941]: I1130 09:25:35.931276 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 09:25:35 crc kubenswrapper[4941]: I1130 09:25:35.931314 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvbqg\" (UniqueName: \"kubernetes.io/projected/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-kube-api-access-qvbqg\") on node \"crc\" DevicePath \"\""
Nov 30 09:25:35 crc kubenswrapper[4941]: I1130 09:25:35.931363 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 09:25:36 crc kubenswrapper[4941]: I1130 09:25:36.014182 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4ckxv" event={"ID":"e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8","Type":"ContainerDied","Data":"5cc04354a9b00c49203da9ced8bf4059c556008fad5cc9836db5f5d3dab776d7"}
Nov 30 09:25:36 crc kubenswrapper[4941]: I1130 09:25:36.014274 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4ckxv"
Nov 30 09:25:36 crc kubenswrapper[4941]: I1130 09:25:36.014282 4941 scope.go:117] "RemoveContainer" containerID="21a1f9e8c8d9e8fcea24157b23cf7a4315dc2e4e36e58b03a44efeedbc2c143c"
Nov 30 09:25:36 crc kubenswrapper[4941]: I1130 09:25:36.057372 4941 scope.go:117] "RemoveContainer" containerID="27f1ce78198cb21d874f4b631d401996ff96be5d5359dc79ef225870042b11a1"
Nov 30 09:25:36 crc kubenswrapper[4941]: I1130 09:25:36.067755 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4ckxv"]
Nov 30 09:25:36 crc kubenswrapper[4941]: I1130 09:25:36.078548 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4ckxv"]
Nov 30 09:25:36 crc kubenswrapper[4941]: I1130 09:25:36.098458 4941 scope.go:117] "RemoveContainer" containerID="206af3fd3908b13b7d474a45261424e827fe62535805b413ccce18e356267e6f"
Nov 30 09:25:37 crc kubenswrapper[4941]: I1130 09:25:37.535242 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" path="/var/lib/kubelet/pods/e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8/volumes"
Nov 30 09:26:02 crc kubenswrapper[4941]: I1130 09:26:02.978731 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 09:26:02 crc kubenswrapper[4941]: I1130 09:26:02.979310 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 09:26:32 crc kubenswrapper[4941]: I1130 09:26:32.978666 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 09:26:32 crc kubenswrapper[4941]: I1130 09:26:32.979340 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 09:26:32 crc kubenswrapper[4941]: I1130 09:26:32.979399 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 09:26:32 crc kubenswrapper[4941]: I1130 09:26:32.980271 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 09:26:32 crc kubenswrapper[4941]: I1130 09:26:32.980342 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" gracePeriod=600
Nov 30 09:26:33 crc kubenswrapper[4941]: E1130 09:26:33.112382 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:26:33 crc kubenswrapper[4941]: I1130 09:26:33.680211 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" exitCode=0
Nov 30 09:26:33 crc kubenswrapper[4941]: I1130 09:26:33.680261 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"}
Nov 30 09:26:33 crc kubenswrapper[4941]: I1130 09:26:33.680299 4941 scope.go:117] "RemoveContainer" containerID="fd03ca3f2fb180c206b843dd0d38c762f5d17c6bb7f0ce3326052857e4af3e3a"
Nov 30 09:26:33 crc kubenswrapper[4941]: I1130 09:26:33.681459 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:26:33 crc kubenswrapper[4941]: E1130 09:26:33.681981 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:26:45 crc kubenswrapper[4941]: I1130 09:26:45.523565 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:26:45 crc kubenswrapper[4941]: E1130 09:26:45.525489 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:26:57 crc kubenswrapper[4941]: I1130 09:26:57.522800 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:26:57 crc kubenswrapper[4941]: E1130 09:26:57.523516 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:27:08 crc kubenswrapper[4941]: I1130 09:27:08.523483 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:27:08 crc kubenswrapper[4941]: E1130 09:27:08.524547 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:27:19 crc kubenswrapper[4941]: I1130 09:27:19.534542 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:27:19 crc kubenswrapper[4941]: E1130 09:27:19.536111 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:27:33 crc kubenswrapper[4941]: I1130 09:27:33.522632 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:27:33 crc kubenswrapper[4941]: E1130 09:27:33.523891 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:27:33 crc kubenswrapper[4941]: I1130 09:27:33.914771 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kkqmc"]
Nov 30 09:27:33 crc kubenswrapper[4941]: E1130 09:27:33.915841 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" containerName="extract-content"
Nov 30 09:27:33 crc kubenswrapper[4941]: I1130 09:27:33.915866 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" containerName="extract-content"
Nov 30 09:27:33 crc kubenswrapper[4941]: E1130 09:27:33.915886 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" containerName="registry-server"
Nov 30 09:27:33 crc kubenswrapper[4941]: I1130 09:27:33.915894 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" containerName="registry-server"
Nov 30 09:27:33 crc kubenswrapper[4941]: E1130 09:27:33.915908 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" containerName="extract-utilities"
Nov 30 09:27:33 crc kubenswrapper[4941]: I1130 09:27:33.915916 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" containerName="extract-utilities"
Nov 30 09:27:33 crc kubenswrapper[4941]: I1130 09:27:33.916143 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e06f5634-d4d5-40b5-a86c-df9f2d7b0dc8" containerName="registry-server"
Nov 30 09:27:33 crc kubenswrapper[4941]: I1130 09:27:33.917929 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:33 crc kubenswrapper[4941]: I1130 09:27:33.929220 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kkqmc"]
Nov 30 09:27:34 crc kubenswrapper[4941]: I1130 09:27:34.093585 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31e6140-410f-480e-b58c-bec336e6e911-utilities\") pod \"redhat-operators-kkqmc\" (UID: \"e31e6140-410f-480e-b58c-bec336e6e911\") " pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:34 crc kubenswrapper[4941]: I1130 09:27:34.093678 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqvsk\" (UniqueName: \"kubernetes.io/projected/e31e6140-410f-480e-b58c-bec336e6e911-kube-api-access-dqvsk\") pod \"redhat-operators-kkqmc\" (UID: \"e31e6140-410f-480e-b58c-bec336e6e911\") " pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:34 crc kubenswrapper[4941]: I1130 09:27:34.093796 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31e6140-410f-480e-b58c-bec336e6e911-catalog-content\") pod \"redhat-operators-kkqmc\" (UID: \"e31e6140-410f-480e-b58c-bec336e6e911\") " pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:34 crc kubenswrapper[4941]: I1130 09:27:34.195815 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31e6140-410f-480e-b58c-bec336e6e911-catalog-content\") pod \"redhat-operators-kkqmc\" (UID: \"e31e6140-410f-480e-b58c-bec336e6e911\") " pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:34 crc kubenswrapper[4941]: I1130 09:27:34.195926 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31e6140-410f-480e-b58c-bec336e6e911-utilities\") pod \"redhat-operators-kkqmc\" (UID: \"e31e6140-410f-480e-b58c-bec336e6e911\") " pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:34 crc kubenswrapper[4941]: I1130 09:27:34.195992 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqvsk\" (UniqueName: \"kubernetes.io/projected/e31e6140-410f-480e-b58c-bec336e6e911-kube-api-access-dqvsk\") pod \"redhat-operators-kkqmc\" (UID: \"e31e6140-410f-480e-b58c-bec336e6e911\") " pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:34 crc kubenswrapper[4941]: I1130 09:27:34.196913 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31e6140-410f-480e-b58c-bec336e6e911-catalog-content\") pod \"redhat-operators-kkqmc\" (UID: \"e31e6140-410f-480e-b58c-bec336e6e911\") " pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:34 crc kubenswrapper[4941]: I1130 09:27:34.197190 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31e6140-410f-480e-b58c-bec336e6e911-utilities\") pod \"redhat-operators-kkqmc\" (UID: \"e31e6140-410f-480e-b58c-bec336e6e911\") " pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:34 crc kubenswrapper[4941]: I1130 09:27:34.225483 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqvsk\" (UniqueName: \"kubernetes.io/projected/e31e6140-410f-480e-b58c-bec336e6e911-kube-api-access-dqvsk\") pod \"redhat-operators-kkqmc\" (UID: \"e31e6140-410f-480e-b58c-bec336e6e911\") " pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:34 crc kubenswrapper[4941]: I1130 09:27:34.253554 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:34 crc kubenswrapper[4941]: I1130 09:27:34.771985 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kkqmc"]
Nov 30 09:27:35 crc kubenswrapper[4941]: I1130 09:27:35.425945 4941 generic.go:334] "Generic (PLEG): container finished" podID="e31e6140-410f-480e-b58c-bec336e6e911" containerID="672d9a233c8caca15c5efc125274320e2e7ce61308c471812de03d4c0c69b03a" exitCode=0
Nov 30 09:27:35 crc kubenswrapper[4941]: I1130 09:27:35.425994 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkqmc" event={"ID":"e31e6140-410f-480e-b58c-bec336e6e911","Type":"ContainerDied","Data":"672d9a233c8caca15c5efc125274320e2e7ce61308c471812de03d4c0c69b03a"}
Nov 30 09:27:35 crc kubenswrapper[4941]: I1130 09:27:35.426361 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkqmc" event={"ID":"e31e6140-410f-480e-b58c-bec336e6e911","Type":"ContainerStarted","Data":"14a9b30b7cc476a8a9aae1ab179b6b60a0e25d4d085551157895dfc6aa74b179"}
Nov 30 09:27:36 crc kubenswrapper[4941]: I1130 09:27:36.437892 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkqmc" event={"ID":"e31e6140-410f-480e-b58c-bec336e6e911","Type":"ContainerStarted","Data":"65294ab53d409b16a9d420759301b2c3a393193cf4523bbcfde1aebc15a57d73"}
Nov 30 09:27:38 crc kubenswrapper[4941]: I1130 09:27:38.476612 4941 generic.go:334] "Generic (PLEG): container finished" podID="e31e6140-410f-480e-b58c-bec336e6e911" containerID="65294ab53d409b16a9d420759301b2c3a393193cf4523bbcfde1aebc15a57d73" exitCode=0
Nov 30 09:27:38 crc kubenswrapper[4941]: I1130 09:27:38.476721 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkqmc" event={"ID":"e31e6140-410f-480e-b58c-bec336e6e911","Type":"ContainerDied","Data":"65294ab53d409b16a9d420759301b2c3a393193cf4523bbcfde1aebc15a57d73"}
Nov 30 09:27:39 crc kubenswrapper[4941]: I1130 09:27:39.496461 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkqmc" event={"ID":"e31e6140-410f-480e-b58c-bec336e6e911","Type":"ContainerStarted","Data":"4a8bfdebbe60ee607e8ced79fec5f422ff8d1891a351baa0f64e4e4fcb20eacc"}
Nov 30 09:27:39 crc kubenswrapper[4941]: I1130 09:27:39.533769 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kkqmc" podStartSLOduration=3.052177419 podStartE2EDuration="6.533744201s" podCreationTimestamp="2025-11-30 09:27:33 +0000 UTC" firstStartedPulling="2025-11-30 09:27:35.429179544 +0000 UTC m=+9676.197351153" lastFinishedPulling="2025-11-30 09:27:38.910746336 +0000 UTC m=+9679.678917935" observedRunningTime="2025-11-30 09:27:39.519887302 +0000 UTC m=+9680.288058931" watchObservedRunningTime="2025-11-30 09:27:39.533744201 +0000 UTC m=+9680.301915800"
Nov 30 09:27:44 crc kubenswrapper[4941]: I1130 09:27:44.254583 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:44 crc kubenswrapper[4941]: I1130 09:27:44.255126 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:45 crc kubenswrapper[4941]: I1130 09:27:45.321570 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kkqmc" podUID="e31e6140-410f-480e-b58c-bec336e6e911" containerName="registry-server" probeResult="failure" output=<
Nov 30 09:27:45 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s
Nov 30 09:27:45 crc kubenswrapper[4941]: >
Nov 30 09:27:48 crc kubenswrapper[4941]: I1130 09:27:48.521625 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:27:48 crc kubenswrapper[4941]: E1130 09:27:48.522580 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:27:54 crc kubenswrapper[4941]: I1130 09:27:54.335004 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:54 crc kubenswrapper[4941]: I1130 09:27:54.400805 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:54 crc kubenswrapper[4941]: I1130 09:27:54.579722 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kkqmc"]
Nov 30 09:27:54 crc kubenswrapper[4941]: I1130 09:27:54.673359 4941 generic.go:334] "Generic (PLEG): container finished" podID="8501edc2-e733-4e75-9afd-ecefc4f74de2" containerID="a16952e422a63e634037765ddffbfdb715e56abde8c12cef9431bdd26ccf275f" exitCode=0
Nov 30 09:27:54 crc kubenswrapper[4941]: I1130 09:27:54.673723 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"8501edc2-e733-4e75-9afd-ecefc4f74de2","Type":"ContainerDied","Data":"a16952e422a63e634037765ddffbfdb715e56abde8c12cef9431bdd26ccf275f"}
Nov 30 09:27:55 crc kubenswrapper[4941]: I1130 09:27:55.683146 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kkqmc" podUID="e31e6140-410f-480e-b58c-bec336e6e911" containerName="registry-server" containerID="cri-o://4a8bfdebbe60ee607e8ced79fec5f422ff8d1891a351baa0f64e4e4fcb20eacc" gracePeriod=2
Nov 30 09:27:56 crc kubenswrapper[4941]: I1130 09:27:56.697499 4941 generic.go:334] "Generic (PLEG): container finished" podID="e31e6140-410f-480e-b58c-bec336e6e911" containerID="4a8bfdebbe60ee607e8ced79fec5f422ff8d1891a351baa0f64e4e4fcb20eacc" exitCode=0
Nov 30 09:27:56 crc kubenswrapper[4941]: I1130 09:27:56.697563 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkqmc" event={"ID":"e31e6140-410f-480e-b58c-bec336e6e911","Type":"ContainerDied","Data":"4a8bfdebbe60ee607e8ced79fec5f422ff8d1891a351baa0f64e4e4fcb20eacc"}
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.107052 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.115341 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.172914 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-ssh-key\") pod \"8501edc2-e733-4e75-9afd-ecefc4f74de2\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.172968 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8501edc2-e733-4e75-9afd-ecefc4f74de2-test-operator-ephemeral-temporary\") pod \"8501edc2-e733-4e75-9afd-ecefc4f74de2\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.173014 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8501edc2-e733-4e75-9afd-ecefc4f74de2-config-data\") pod \"8501edc2-e733-4e75-9afd-ecefc4f74de2\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.173091 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"8501edc2-e733-4e75-9afd-ecefc4f74de2\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.173121 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-ca-certs\") pod \"8501edc2-e733-4e75-9afd-ecefc4f74de2\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.173159 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45glb\" (UniqueName: \"kubernetes.io/projected/8501edc2-e733-4e75-9afd-ecefc4f74de2-kube-api-access-45glb\") pod \"8501edc2-e733-4e75-9afd-ecefc4f74de2\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.173193 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31e6140-410f-480e-b58c-bec336e6e911-utilities\") pod \"e31e6140-410f-480e-b58c-bec336e6e911\" (UID: \"e31e6140-410f-480e-b58c-bec336e6e911\") "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.173284 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8501edc2-e733-4e75-9afd-ecefc4f74de2-openstack-config\") pod \"8501edc2-e733-4e75-9afd-ecefc4f74de2\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.173396 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-openstack-config-secret\") pod \"8501edc2-e733-4e75-9afd-ecefc4f74de2\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.173422 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31e6140-410f-480e-b58c-bec336e6e911-catalog-content\") pod \"e31e6140-410f-480e-b58c-bec336e6e911\" (UID: \"e31e6140-410f-480e-b58c-bec336e6e911\") "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.173438 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqvsk\" (UniqueName: \"kubernetes.io/projected/e31e6140-410f-480e-b58c-bec336e6e911-kube-api-access-dqvsk\") pod \"e31e6140-410f-480e-b58c-bec336e6e911\" (UID: \"e31e6140-410f-480e-b58c-bec336e6e911\") "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.173474 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8501edc2-e733-4e75-9afd-ecefc4f74de2-test-operator-ephemeral-workdir\") pod \"8501edc2-e733-4e75-9afd-ecefc4f74de2\" (UID: \"8501edc2-e733-4e75-9afd-ecefc4f74de2\") "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.179318 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8501edc2-e733-4e75-9afd-ecefc4f74de2-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "8501edc2-e733-4e75-9afd-ecefc4f74de2" (UID: "8501edc2-e733-4e75-9afd-ecefc4f74de2"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.179894 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8501edc2-e733-4e75-9afd-ecefc4f74de2-config-data" (OuterVolumeSpecName: "config-data") pod "8501edc2-e733-4e75-9afd-ecefc4f74de2" (UID: "8501edc2-e733-4e75-9afd-ecefc4f74de2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.181308 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e31e6140-410f-480e-b58c-bec336e6e911-utilities" (OuterVolumeSpecName: "utilities") pod "e31e6140-410f-480e-b58c-bec336e6e911" (UID: "e31e6140-410f-480e-b58c-bec336e6e911"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.184403 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e31e6140-410f-480e-b58c-bec336e6e911-kube-api-access-dqvsk" (OuterVolumeSpecName: "kube-api-access-dqvsk") pod "e31e6140-410f-480e-b58c-bec336e6e911" (UID: "e31e6140-410f-480e-b58c-bec336e6e911"). InnerVolumeSpecName "kube-api-access-dqvsk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.188507 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8501edc2-e733-4e75-9afd-ecefc4f74de2-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "8501edc2-e733-4e75-9afd-ecefc4f74de2" (UID: "8501edc2-e733-4e75-9afd-ecefc4f74de2"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.190931 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "test-operator-logs") pod "8501edc2-e733-4e75-9afd-ecefc4f74de2" (UID: "8501edc2-e733-4e75-9afd-ecefc4f74de2"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.209152 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8501edc2-e733-4e75-9afd-ecefc4f74de2-kube-api-access-45glb" (OuterVolumeSpecName: "kube-api-access-45glb") pod "8501edc2-e733-4e75-9afd-ecefc4f74de2" (UID: "8501edc2-e733-4e75-9afd-ecefc4f74de2"). InnerVolumeSpecName "kube-api-access-45glb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.235800 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "8501edc2-e733-4e75-9afd-ecefc4f74de2" (UID: "8501edc2-e733-4e75-9afd-ecefc4f74de2"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.237681 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "8501edc2-e733-4e75-9afd-ecefc4f74de2" (UID: "8501edc2-e733-4e75-9afd-ecefc4f74de2"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.248166 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8501edc2-e733-4e75-9afd-ecefc4f74de2" (UID: "8501edc2-e733-4e75-9afd-ecefc4f74de2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.274163 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8501edc2-e733-4e75-9afd-ecefc4f74de2-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "8501edc2-e733-4e75-9afd-ecefc4f74de2" (UID: "8501edc2-e733-4e75-9afd-ecefc4f74de2"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.276266 4941 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.276296 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqvsk\" (UniqueName: \"kubernetes.io/projected/e31e6140-410f-480e-b58c-bec336e6e911-kube-api-access-dqvsk\") on node \"crc\" DevicePath \"\""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.276307 4941 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/8501edc2-e733-4e75-9afd-ecefc4f74de2-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.276339 4941 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/8501edc2-e733-4e75-9afd-ecefc4f74de2-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.276358 4941 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.276367 4941 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8501edc2-e733-4e75-9afd-ecefc4f74de2-config-data\") on node \"crc\" DevicePath \"\""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.276399 4941 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" "
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.276408 4941 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/8501edc2-e733-4e75-9afd-ecefc4f74de2-ca-certs\") on node \"crc\" DevicePath \"\""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.276417 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45glb\" (UniqueName: \"kubernetes.io/projected/8501edc2-e733-4e75-9afd-ecefc4f74de2-kube-api-access-45glb\") on node \"crc\" DevicePath \"\""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.276426 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e31e6140-410f-480e-b58c-bec336e6e911-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.276434 4941 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8501edc2-e733-4e75-9afd-ecefc4f74de2-openstack-config\") on node \"crc\" DevicePath \"\""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.304912 4941 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc"
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.321550 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e31e6140-410f-480e-b58c-bec336e6e911-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e31e6140-410f-480e-b58c-bec336e6e911" (UID: "e31e6140-410f-480e-b58c-bec336e6e911"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.378311 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e31e6140-410f-480e-b58c-bec336e6e911-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.378406 4941 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\""
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.714069 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"8501edc2-e733-4e75-9afd-ecefc4f74de2","Type":"ContainerDied","Data":"540106f0d7985720d08eb269bb207cc017c0b727e28f0c1e059ef2b799bbccc8"}
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.714591 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="540106f0d7985720d08eb269bb207cc017c0b727e28f0c1e059ef2b799bbccc8"
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.714155 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.719423 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kkqmc"
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.719439 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kkqmc" event={"ID":"e31e6140-410f-480e-b58c-bec336e6e911","Type":"ContainerDied","Data":"14a9b30b7cc476a8a9aae1ab179b6b60a0e25d4d085551157895dfc6aa74b179"}
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.719497 4941 scope.go:117] "RemoveContainer" containerID="4a8bfdebbe60ee607e8ced79fec5f422ff8d1891a351baa0f64e4e4fcb20eacc"
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.753984 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kkqmc"]
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.764762 4941 scope.go:117] "RemoveContainer" containerID="65294ab53d409b16a9d420759301b2c3a393193cf4523bbcfde1aebc15a57d73"
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.768539 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kkqmc"]
Nov 30 09:27:57 crc kubenswrapper[4941]: I1130 09:27:57.793186 4941 scope.go:117] "RemoveContainer" containerID="672d9a233c8caca15c5efc125274320e2e7ce61308c471812de03d4c0c69b03a"
Nov 30 09:27:59 crc kubenswrapper[4941]: I1130 09:27:59.536702 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e31e6140-410f-480e-b58c-bec336e6e911" path="/var/lib/kubelet/pods/e31e6140-410f-480e-b58c-bec336e6e911/volumes"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.406417 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Nov 30 09:28:01 crc kubenswrapper[4941]: E1130 09:28:01.407372 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8501edc2-e733-4e75-9afd-ecefc4f74de2" containerName="tempest-tests-tempest-tests-runner"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.407402 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="8501edc2-e733-4e75-9afd-ecefc4f74de2" containerName="tempest-tests-tempest-tests-runner"
Nov 30 09:28:01 crc kubenswrapper[4941]: E1130 09:28:01.407441 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e31e6140-410f-480e-b58c-bec336e6e911" containerName="extract-content"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.407455 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e31e6140-410f-480e-b58c-bec336e6e911" containerName="extract-content"
Nov 30 09:28:01 crc kubenswrapper[4941]: E1130 09:28:01.407481 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e31e6140-410f-480e-b58c-bec336e6e911" containerName="extract-utilities"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.407490 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e31e6140-410f-480e-b58c-bec336e6e911" containerName="extract-utilities"
Nov 30 09:28:01 crc kubenswrapper[4941]: E1130 09:28:01.407524 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e31e6140-410f-480e-b58c-bec336e6e911" containerName="registry-server"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.407531 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="e31e6140-410f-480e-b58c-bec336e6e911" containerName="registry-server"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.407744 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="8501edc2-e733-4e75-9afd-ecefc4f74de2" containerName="tempest-tests-tempest-tests-runner"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.407761 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="e31e6140-410f-480e-b58c-bec336e6e911" containerName="registry-server"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.408725 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.411745 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-59s4s"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.421117 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.482129 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"4341309d-299a-4004-b88a-add57d4ea72f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.482281 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgkk5\" (UniqueName: \"kubernetes.io/projected/4341309d-299a-4004-b88a-add57d4ea72f-kube-api-access-lgkk5\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"4341309d-299a-4004-b88a-add57d4ea72f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.584520 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgkk5\" (UniqueName: \"kubernetes.io/projected/4341309d-299a-4004-b88a-add57d4ea72f-kube-api-access-lgkk5\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"4341309d-299a-4004-b88a-add57d4ea72f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.584702 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"4341309d-299a-4004-b88a-add57d4ea72f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.585065 4941 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"4341309d-299a-4004-b88a-add57d4ea72f\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.605001 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgkk5\" (UniqueName: \"kubernetes.io/projected/4341309d-299a-4004-b88a-add57d4ea72f-kube-api-access-lgkk5\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"4341309d-299a-4004-b88a-add57d4ea72f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.615014 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"4341309d-299a-4004-b88a-add57d4ea72f\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Nov 30 09:28:01 crc kubenswrapper[4941]: I1130 09:28:01.738985 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Nov 30 09:28:02 crc kubenswrapper[4941]: I1130 09:28:02.224037 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Nov 30 09:28:02 crc kubenswrapper[4941]: I1130 09:28:02.778991 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"4341309d-299a-4004-b88a-add57d4ea72f","Type":"ContainerStarted","Data":"5f26c2317fda73a386baf23aaa90dc34cf68dae0c354cc4467864e79b211b619"}
Nov 30 09:28:03 crc kubenswrapper[4941]: I1130 09:28:03.522158 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:28:03 crc kubenswrapper[4941]: E1130 09:28:03.522734 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:28:03 crc kubenswrapper[4941]: I1130 09:28:03.792854 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"4341309d-299a-4004-b88a-add57d4ea72f","Type":"ContainerStarted","Data":"f6d15bbe656dfd6a2ca73f87b1f3daced6a64128c72fca65df0272fc9d2878c2"}
Nov 30 09:28:03 crc kubenswrapper[4941]: I1130 09:28:03.813938 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.845069539 podStartE2EDuration="2.813907274s" podCreationTimestamp="2025-11-30 09:28:01 +0000 UTC" firstStartedPulling="2025-11-30 09:28:02.227105772 +0000 UTC m=+9702.995277401" lastFinishedPulling="2025-11-30 09:28:03.195943527 +0000 UTC m=+9703.964115136" observedRunningTime="2025-11-30 09:28:03.812245752 +0000 UTC m=+9704.580417371" watchObservedRunningTime="2025-11-30 09:28:03.813907274 +0000 UTC m=+9704.582078893"
Nov 30 09:28:17 crc kubenswrapper[4941]: I1130 09:28:17.521653 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:28:17 crc kubenswrapper[4941]: E1130 09:28:17.522929 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:28:29 crc kubenswrapper[4941]: I1130 09:28:29.529714 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:28:29 crc kubenswrapper[4941]: E1130 09:28:29.530659 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:28:43 crc kubenswrapper[4941]: I1130 09:28:43.522575 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:28:43 crc kubenswrapper[4941]: E1130 09:28:43.523421 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:28:58 crc kubenswrapper[4941]: I1130 09:28:58.522100 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:28:58 crc kubenswrapper[4941]: E1130 09:28:58.522824 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.166991 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-z2zpj/must-gather-28nwg"]
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.169810 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-z2zpj/must-gather-28nwg"
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.172097 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-z2zpj"/"openshift-service-ca.crt"
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.172243 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-z2zpj"/"default-dockercfg-p9m96"
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.177259 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-z2zpj"/"kube-root-ca.crt"
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.193124 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-z2zpj/must-gather-28nwg"]
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.289747 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4615cc80-a7a5-4bee-928f-bf978c248d8d-must-gather-output\") pod \"must-gather-28nwg\" (UID: \"4615cc80-a7a5-4bee-928f-bf978c248d8d\") " pod="openshift-must-gather-z2zpj/must-gather-28nwg"
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.289950 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szd9c\" (UniqueName: \"kubernetes.io/projected/4615cc80-a7a5-4bee-928f-bf978c248d8d-kube-api-access-szd9c\") pod \"must-gather-28nwg\" (UID: \"4615cc80-a7a5-4bee-928f-bf978c248d8d\") " pod="openshift-must-gather-z2zpj/must-gather-28nwg"
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.391915 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szd9c\" (UniqueName: \"kubernetes.io/projected/4615cc80-a7a5-4bee-928f-bf978c248d8d-kube-api-access-szd9c\") pod \"must-gather-28nwg\" (UID: \"4615cc80-a7a5-4bee-928f-bf978c248d8d\") " pod="openshift-must-gather-z2zpj/must-gather-28nwg"
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.392028 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4615cc80-a7a5-4bee-928f-bf978c248d8d-must-gather-output\") pod \"must-gather-28nwg\" (UID: \"4615cc80-a7a5-4bee-928f-bf978c248d8d\") " pod="openshift-must-gather-z2zpj/must-gather-28nwg"
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.392794 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4615cc80-a7a5-4bee-928f-bf978c248d8d-must-gather-output\") pod \"must-gather-28nwg\" (UID: \"4615cc80-a7a5-4bee-928f-bf978c248d8d\") " pod="openshift-must-gather-z2zpj/must-gather-28nwg"
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.437917 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szd9c\" (UniqueName: \"kubernetes.io/projected/4615cc80-a7a5-4bee-928f-bf978c248d8d-kube-api-access-szd9c\") pod \"must-gather-28nwg\" (UID: \"4615cc80-a7a5-4bee-928f-bf978c248d8d\") " pod="openshift-must-gather-z2zpj/must-gather-28nwg"
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.499274 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-z2zpj/must-gather-28nwg"
Nov 30 09:29:10 crc kubenswrapper[4941]: I1130 09:29:10.522467 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:29:10 crc kubenswrapper[4941]: E1130 09:29:10.523171 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:29:11 crc kubenswrapper[4941]: I1130 09:29:11.040438 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-z2zpj/must-gather-28nwg"]
Nov 30 09:29:11 crc kubenswrapper[4941]: I1130 09:29:11.539102 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z2zpj/must-gather-28nwg" event={"ID":"4615cc80-a7a5-4bee-928f-bf978c248d8d","Type":"ContainerStarted","Data":"db0519a6898aefb47da51ff07b25168acaef5182a26bfe053906523d92bafce5"}
Nov 30 09:29:16 crc kubenswrapper[4941]: I1130 09:29:16.614085 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z2zpj/must-gather-28nwg" event={"ID":"4615cc80-a7a5-4bee-928f-bf978c248d8d","Type":"ContainerStarted","Data":"ad7d847fb41ebd999ef658eed6127ef7ed7283ac1e307c67a3cdcd880a6991f3"}
Nov 30 09:29:16 crc kubenswrapper[4941]: I1130 09:29:16.614676 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z2zpj/must-gather-28nwg" event={"ID":"4615cc80-a7a5-4bee-928f-bf978c248d8d","Type":"ContainerStarted","Data":"0a66335a296f4691caed04ecbae0d10673a73c80b4dc1f2e40ff1f86c85a2177"}
Nov 30 09:29:16 crc kubenswrapper[4941]: I1130 09:29:16.648040 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-z2zpj/must-gather-28nwg" podStartSLOduration=2.18700826 podStartE2EDuration="6.648006575s" podCreationTimestamp="2025-11-30 09:29:10 +0000 UTC" firstStartedPulling="2025-11-30 09:29:11.047624804 +0000 UTC m=+9771.815796413" lastFinishedPulling="2025-11-30 09:29:15.508623119 +0000 UTC m=+9776.276794728" observedRunningTime="2025-11-30 09:29:16.6355723 +0000 UTC m=+9777.403743899" watchObservedRunningTime="2025-11-30 09:29:16.648006575 +0000 UTC m=+9777.416178184"
Nov 30 09:29:20 crc kubenswrapper[4941]: I1130 09:29:20.758273 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-z2zpj/crc-debug-2kqrs"]
Nov 30 09:29:20 crc kubenswrapper[4941]: I1130 09:29:20.760384 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" Nov 30 09:29:20 crc kubenswrapper[4941]: I1130 09:29:20.845975 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d299f6ec-1682-4331-bf74-b715b0dc58a2-host\") pod \"crc-debug-2kqrs\" (UID: \"d299f6ec-1682-4331-bf74-b715b0dc58a2\") " pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" Nov 30 09:29:20 crc kubenswrapper[4941]: I1130 09:29:20.846044 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vft7\" (UniqueName: \"kubernetes.io/projected/d299f6ec-1682-4331-bf74-b715b0dc58a2-kube-api-access-2vft7\") pod \"crc-debug-2kqrs\" (UID: \"d299f6ec-1682-4331-bf74-b715b0dc58a2\") " pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" Nov 30 09:29:20 crc kubenswrapper[4941]: I1130 09:29:20.948967 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vft7\" (UniqueName: \"kubernetes.io/projected/d299f6ec-1682-4331-bf74-b715b0dc58a2-kube-api-access-2vft7\") pod \"crc-debug-2kqrs\" (UID: \"d299f6ec-1682-4331-bf74-b715b0dc58a2\") " pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" Nov 30 09:29:20 crc kubenswrapper[4941]: I1130 09:29:20.949096 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d299f6ec-1682-4331-bf74-b715b0dc58a2-host\") pod \"crc-debug-2kqrs\" (UID: \"d299f6ec-1682-4331-bf74-b715b0dc58a2\") " pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" Nov 30 09:29:20 crc kubenswrapper[4941]: I1130 09:29:20.949278 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d299f6ec-1682-4331-bf74-b715b0dc58a2-host\") pod \"crc-debug-2kqrs\" (UID: \"d299f6ec-1682-4331-bf74-b715b0dc58a2\") " pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" Nov 30 09:29:20 crc kubenswrapper[4941]: I1130 09:29:20.973180 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vft7\" (UniqueName: \"kubernetes.io/projected/d299f6ec-1682-4331-bf74-b715b0dc58a2-kube-api-access-2vft7\") pod \"crc-debug-2kqrs\" (UID: \"d299f6ec-1682-4331-bf74-b715b0dc58a2\") " pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" Nov 30 09:29:21 crc kubenswrapper[4941]: I1130 09:29:21.079948 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" Nov 30 09:29:21 crc kubenswrapper[4941]: I1130 09:29:21.679054 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" event={"ID":"d299f6ec-1682-4331-bf74-b715b0dc58a2","Type":"ContainerStarted","Data":"05ac20d32a41fc92f63af95cf5cdc0759d2e0db17c0968ab071ada733c976180"} Nov 30 09:29:22 crc kubenswrapper[4941]: I1130 09:29:22.521714 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" Nov 30 09:29:22 crc kubenswrapper[4941]: E1130 09:29:22.540992 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:29:33 crc kubenswrapper[4941]: I1130 09:29:33.521986 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" Nov 30 09:29:33 crc kubenswrapper[4941]: E1130 09:29:33.522931 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:29:34 crc kubenswrapper[4941]: I1130 09:29:34.887774 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" event={"ID":"d299f6ec-1682-4331-bf74-b715b0dc58a2","Type":"ContainerStarted","Data":"54e9ea0961143e049825abcf1f36ffed71351b657a72d882b8373c5aac20c186"} Nov 30 09:29:34 crc kubenswrapper[4941]: I1130 09:29:34.915720 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" podStartSLOduration=1.795482379 podStartE2EDuration="14.915700816s" podCreationTimestamp="2025-11-30 09:29:20 +0000 UTC" firstStartedPulling="2025-11-30 09:29:21.1225707 +0000 UTC m=+9781.890742309" lastFinishedPulling="2025-11-30 09:29:34.242789137 +0000 UTC m=+9795.010960746" observedRunningTime="2025-11-30 09:29:34.90775906 +0000 UTC m=+9795.675930669" watchObservedRunningTime="2025-11-30 09:29:34.915700816 +0000 UTC m=+9795.683872425" Nov 30 09:29:48 crc kubenswrapper[4941]: I1130 09:29:48.522614 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" Nov 30 09:29:48 crc kubenswrapper[4941]: E1130 09:29:48.524403 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:29:51 crc kubenswrapper[4941]: I1130 09:29:51.058474 4941 generic.go:334] "Generic (PLEG): container finished" podID="d299f6ec-1682-4331-bf74-b715b0dc58a2" 
containerID="54e9ea0961143e049825abcf1f36ffed71351b657a72d882b8373c5aac20c186" exitCode=0 Nov 30 09:29:51 crc kubenswrapper[4941]: I1130 09:29:51.058551 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" event={"ID":"d299f6ec-1682-4331-bf74-b715b0dc58a2","Type":"ContainerDied","Data":"54e9ea0961143e049825abcf1f36ffed71351b657a72d882b8373c5aac20c186"} Nov 30 09:29:52 crc kubenswrapper[4941]: I1130 09:29:52.206980 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" Nov 30 09:29:52 crc kubenswrapper[4941]: I1130 09:29:52.246127 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-z2zpj/crc-debug-2kqrs"] Nov 30 09:29:52 crc kubenswrapper[4941]: I1130 09:29:52.256961 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-z2zpj/crc-debug-2kqrs"] Nov 30 09:29:52 crc kubenswrapper[4941]: I1130 09:29:52.324299 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d299f6ec-1682-4331-bf74-b715b0dc58a2-host\") pod \"d299f6ec-1682-4331-bf74-b715b0dc58a2\" (UID: \"d299f6ec-1682-4331-bf74-b715b0dc58a2\") " Nov 30 09:29:52 crc kubenswrapper[4941]: I1130 09:29:52.324446 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d299f6ec-1682-4331-bf74-b715b0dc58a2-host" (OuterVolumeSpecName: "host") pod "d299f6ec-1682-4331-bf74-b715b0dc58a2" (UID: "d299f6ec-1682-4331-bf74-b715b0dc58a2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 09:29:52 crc kubenswrapper[4941]: I1130 09:29:52.324494 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vft7\" (UniqueName: \"kubernetes.io/projected/d299f6ec-1682-4331-bf74-b715b0dc58a2-kube-api-access-2vft7\") pod \"d299f6ec-1682-4331-bf74-b715b0dc58a2\" (UID: \"d299f6ec-1682-4331-bf74-b715b0dc58a2\") " Nov 30 09:29:52 crc kubenswrapper[4941]: I1130 09:29:52.325165 4941 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d299f6ec-1682-4331-bf74-b715b0dc58a2-host\") on node \"crc\" DevicePath \"\"" Nov 30 09:29:52 crc kubenswrapper[4941]: I1130 09:29:52.331274 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d299f6ec-1682-4331-bf74-b715b0dc58a2-kube-api-access-2vft7" (OuterVolumeSpecName: "kube-api-access-2vft7") pod "d299f6ec-1682-4331-bf74-b715b0dc58a2" (UID: "d299f6ec-1682-4331-bf74-b715b0dc58a2"). InnerVolumeSpecName "kube-api-access-2vft7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:29:52 crc kubenswrapper[4941]: I1130 09:29:52.429096 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vft7\" (UniqueName: \"kubernetes.io/projected/d299f6ec-1682-4331-bf74-b715b0dc58a2-kube-api-access-2vft7\") on node \"crc\" DevicePath \"\"" Nov 30 09:29:53 crc kubenswrapper[4941]: I1130 09:29:53.078783 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05ac20d32a41fc92f63af95cf5cdc0759d2e0db17c0968ab071ada733c976180" Nov 30 09:29:53 crc kubenswrapper[4941]: I1130 09:29:53.078846 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-z2zpj/crc-debug-2kqrs" Nov 30 09:29:53 crc kubenswrapper[4941]: I1130 09:29:53.536548 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d299f6ec-1682-4331-bf74-b715b0dc58a2" path="/var/lib/kubelet/pods/d299f6ec-1682-4331-bf74-b715b0dc58a2/volumes" Nov 30 09:29:53 crc kubenswrapper[4941]: I1130 09:29:53.797063 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-z2zpj/crc-debug-dqp2h"] Nov 30 09:29:53 crc kubenswrapper[4941]: E1130 09:29:53.797959 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d299f6ec-1682-4331-bf74-b715b0dc58a2" containerName="container-00" Nov 30 09:29:53 crc kubenswrapper[4941]: I1130 09:29:53.797978 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="d299f6ec-1682-4331-bf74-b715b0dc58a2" containerName="container-00" Nov 30 09:29:53 crc kubenswrapper[4941]: I1130 09:29:53.798214 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="d299f6ec-1682-4331-bf74-b715b0dc58a2" containerName="container-00" Nov 30 09:29:53 crc kubenswrapper[4941]: I1130 09:29:53.799066 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-z2zpj/crc-debug-dqp2h" Nov 30 09:29:53 crc kubenswrapper[4941]: I1130 09:29:53.964194 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/033f84e1-f80f-4906-9f32-940a9e7f4b2d-host\") pod \"crc-debug-dqp2h\" (UID: \"033f84e1-f80f-4906-9f32-940a9e7f4b2d\") " pod="openshift-must-gather-z2zpj/crc-debug-dqp2h" Nov 30 09:29:53 crc kubenswrapper[4941]: I1130 09:29:53.964285 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m27sz\" (UniqueName: \"kubernetes.io/projected/033f84e1-f80f-4906-9f32-940a9e7f4b2d-kube-api-access-m27sz\") pod \"crc-debug-dqp2h\" (UID: \"033f84e1-f80f-4906-9f32-940a9e7f4b2d\") " pod="openshift-must-gather-z2zpj/crc-debug-dqp2h" Nov 30 09:29:54 crc kubenswrapper[4941]: I1130 09:29:54.067701 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m27sz\" (UniqueName: \"kubernetes.io/projected/033f84e1-f80f-4906-9f32-940a9e7f4b2d-kube-api-access-m27sz\") pod \"crc-debug-dqp2h\" (UID: \"033f84e1-f80f-4906-9f32-940a9e7f4b2d\") " pod="openshift-must-gather-z2zpj/crc-debug-dqp2h" Nov 30 09:29:54 crc kubenswrapper[4941]: I1130 09:29:54.067994 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/033f84e1-f80f-4906-9f32-940a9e7f4b2d-host\") pod \"crc-debug-dqp2h\" (UID: \"033f84e1-f80f-4906-9f32-940a9e7f4b2d\") " pod="openshift-must-gather-z2zpj/crc-debug-dqp2h" Nov 30 09:29:54 crc kubenswrapper[4941]: I1130 09:29:54.068138 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/033f84e1-f80f-4906-9f32-940a9e7f4b2d-host\") pod \"crc-debug-dqp2h\" (UID: \"033f84e1-f80f-4906-9f32-940a9e7f4b2d\") " pod="openshift-must-gather-z2zpj/crc-debug-dqp2h" Nov 30 09:29:54 crc kubenswrapper[4941]: I1130 09:29:54.090726 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m27sz\" (UniqueName: \"kubernetes.io/projected/033f84e1-f80f-4906-9f32-940a9e7f4b2d-kube-api-access-m27sz\") pod \"crc-debug-dqp2h\" (UID: \"033f84e1-f80f-4906-9f32-940a9e7f4b2d\") " 
pod="openshift-must-gather-z2zpj/crc-debug-dqp2h" Nov 30 09:29:54 crc kubenswrapper[4941]: I1130 09:29:54.119797 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-z2zpj/crc-debug-dqp2h" Nov 30 09:29:55 crc kubenswrapper[4941]: I1130 09:29:55.132193 4941 generic.go:334] "Generic (PLEG): container finished" podID="033f84e1-f80f-4906-9f32-940a9e7f4b2d" containerID="0ebb9a88c684a0afe7e1d464f070974ef669edd5567a617c71fec48c82d70695" exitCode=1 Nov 30 09:29:55 crc kubenswrapper[4941]: I1130 09:29:55.132262 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z2zpj/crc-debug-dqp2h" event={"ID":"033f84e1-f80f-4906-9f32-940a9e7f4b2d","Type":"ContainerDied","Data":"0ebb9a88c684a0afe7e1d464f070974ef669edd5567a617c71fec48c82d70695"} Nov 30 09:29:55 crc kubenswrapper[4941]: I1130 09:29:55.134442 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z2zpj/crc-debug-dqp2h" event={"ID":"033f84e1-f80f-4906-9f32-940a9e7f4b2d","Type":"ContainerStarted","Data":"f48e2e81c035d4288614861d54c99a56bce33542f74b3a381377bd87d07d29f6"} Nov 30 09:29:55 crc kubenswrapper[4941]: I1130 09:29:55.179047 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-z2zpj/crc-debug-dqp2h"] Nov 30 09:29:55 crc kubenswrapper[4941]: I1130 09:29:55.190258 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-z2zpj/crc-debug-dqp2h"] Nov 30 09:29:56 crc kubenswrapper[4941]: I1130 09:29:56.501887 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-z2zpj/crc-debug-dqp2h" Nov 30 09:29:56 crc kubenswrapper[4941]: I1130 09:29:56.657071 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/033f84e1-f80f-4906-9f32-940a9e7f4b2d-host" (OuterVolumeSpecName: "host") pod "033f84e1-f80f-4906-9f32-940a9e7f4b2d" (UID: "033f84e1-f80f-4906-9f32-940a9e7f4b2d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 30 09:29:56 crc kubenswrapper[4941]: I1130 09:29:56.656970 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/033f84e1-f80f-4906-9f32-940a9e7f4b2d-host\") pod \"033f84e1-f80f-4906-9f32-940a9e7f4b2d\" (UID: \"033f84e1-f80f-4906-9f32-940a9e7f4b2d\") " Nov 30 09:29:56 crc kubenswrapper[4941]: I1130 09:29:56.657478 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m27sz\" (UniqueName: \"kubernetes.io/projected/033f84e1-f80f-4906-9f32-940a9e7f4b2d-kube-api-access-m27sz\") pod \"033f84e1-f80f-4906-9f32-940a9e7f4b2d\" (UID: \"033f84e1-f80f-4906-9f32-940a9e7f4b2d\") " Nov 30 09:29:56 crc kubenswrapper[4941]: I1130 09:29:56.661066 4941 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/033f84e1-f80f-4906-9f32-940a9e7f4b2d-host\") on node \"crc\" DevicePath \"\"" Nov 30 09:29:56 crc kubenswrapper[4941]: I1130 09:29:56.666645 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/033f84e1-f80f-4906-9f32-940a9e7f4b2d-kube-api-access-m27sz" (OuterVolumeSpecName: "kube-api-access-m27sz") pod "033f84e1-f80f-4906-9f32-940a9e7f4b2d" (UID: "033f84e1-f80f-4906-9f32-940a9e7f4b2d"). InnerVolumeSpecName "kube-api-access-m27sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:29:56 crc kubenswrapper[4941]: I1130 09:29:56.764064 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m27sz\" (UniqueName: \"kubernetes.io/projected/033f84e1-f80f-4906-9f32-940a9e7f4b2d-kube-api-access-m27sz\") on node \"crc\" DevicePath \"\"" Nov 30 09:29:57 crc kubenswrapper[4941]: I1130 09:29:57.165790 4941 scope.go:117] "RemoveContainer" containerID="0ebb9a88c684a0afe7e1d464f070974ef669edd5567a617c71fec48c82d70695" Nov 30 09:29:57 crc kubenswrapper[4941]: I1130 09:29:57.165848 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-z2zpj/crc-debug-dqp2h" Nov 30 09:29:57 crc kubenswrapper[4941]: I1130 09:29:57.534954 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="033f84e1-f80f-4906-9f32-940a9e7f4b2d" path="/var/lib/kubelet/pods/033f84e1-f80f-4906-9f32-940a9e7f4b2d/volumes" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.161726 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt"] Nov 30 09:30:00 crc kubenswrapper[4941]: E1130 09:30:00.163875 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="033f84e1-f80f-4906-9f32-940a9e7f4b2d" containerName="container-00" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.163972 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="033f84e1-f80f-4906-9f32-940a9e7f4b2d" containerName="container-00" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.164262 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="033f84e1-f80f-4906-9f32-940a9e7f4b2d" containerName="container-00" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.165203 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.168348 4941 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.178173 4941 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.178692 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt"] Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.348934 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6b0ffa6d-a179-41bf-961b-4de988434f3e-secret-volume\") pod \"collect-profiles-29408250-dkrtt\" (UID: \"6b0ffa6d-a179-41bf-961b-4de988434f3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.349006 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2smk\" (UniqueName: \"kubernetes.io/projected/6b0ffa6d-a179-41bf-961b-4de988434f3e-kube-api-access-k2smk\") pod \"collect-profiles-29408250-dkrtt\" (UID: \"6b0ffa6d-a179-41bf-961b-4de988434f3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.349053 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b0ffa6d-a179-41bf-961b-4de988434f3e-config-volume\") pod \"collect-profiles-29408250-dkrtt\" (UID: \"6b0ffa6d-a179-41bf-961b-4de988434f3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.451173 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6b0ffa6d-a179-41bf-961b-4de988434f3e-secret-volume\") pod \"collect-profiles-29408250-dkrtt\" (UID: \"6b0ffa6d-a179-41bf-961b-4de988434f3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.451262 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2smk\" (UniqueName: \"kubernetes.io/projected/6b0ffa6d-a179-41bf-961b-4de988434f3e-kube-api-access-k2smk\") pod \"collect-profiles-29408250-dkrtt\" (UID: \"6b0ffa6d-a179-41bf-961b-4de988434f3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.451317 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b0ffa6d-a179-41bf-961b-4de988434f3e-config-volume\") pod \"collect-profiles-29408250-dkrtt\" (UID: \"6b0ffa6d-a179-41bf-961b-4de988434f3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.452288 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b0ffa6d-a179-41bf-961b-4de988434f3e-config-volume\") pod 
\"collect-profiles-29408250-dkrtt\" (UID: \"6b0ffa6d-a179-41bf-961b-4de988434f3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.464588 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6b0ffa6d-a179-41bf-961b-4de988434f3e-secret-volume\") pod \"collect-profiles-29408250-dkrtt\" (UID: \"6b0ffa6d-a179-41bf-961b-4de988434f3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.471211 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2smk\" (UniqueName: \"kubernetes.io/projected/6b0ffa6d-a179-41bf-961b-4de988434f3e-kube-api-access-k2smk\") pod \"collect-profiles-29408250-dkrtt\" (UID: \"6b0ffa6d-a179-41bf-961b-4de988434f3e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:00 crc kubenswrapper[4941]: I1130 09:30:00.491280 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:01 crc kubenswrapper[4941]: I1130 09:30:01.009872 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt"] Nov 30 09:30:01 crc kubenswrapper[4941]: I1130 09:30:01.216214 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" event={"ID":"6b0ffa6d-a179-41bf-961b-4de988434f3e","Type":"ContainerStarted","Data":"87b0d93a10fd33e3c1222e9e7b4bc3ab5d20fbd9841f95815390678256d3fc57"} Nov 30 09:30:02 crc kubenswrapper[4941]: I1130 09:30:02.229890 4941 generic.go:334] "Generic (PLEG): container finished" podID="6b0ffa6d-a179-41bf-961b-4de988434f3e" containerID="56bc376339d991ede2a87cc31b6e6b279ea695706ea5f4874bea29a070d7989d" exitCode=0 Nov 30 09:30:02 crc kubenswrapper[4941]: I1130 09:30:02.229968 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" event={"ID":"6b0ffa6d-a179-41bf-961b-4de988434f3e","Type":"ContainerDied","Data":"56bc376339d991ede2a87cc31b6e6b279ea695706ea5f4874bea29a070d7989d"} Nov 30 09:30:03 crc kubenswrapper[4941]: I1130 09:30:03.528223 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" Nov 30 09:30:03 crc kubenswrapper[4941]: E1130 09:30:03.528804 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:30:03 crc kubenswrapper[4941]: I1130 09:30:03.753390 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:03 crc kubenswrapper[4941]: I1130 09:30:03.847498 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2smk\" (UniqueName: \"kubernetes.io/projected/6b0ffa6d-a179-41bf-961b-4de988434f3e-kube-api-access-k2smk\") pod \"6b0ffa6d-a179-41bf-961b-4de988434f3e\" (UID: \"6b0ffa6d-a179-41bf-961b-4de988434f3e\") " Nov 30 09:30:03 crc kubenswrapper[4941]: I1130 09:30:03.847938 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6b0ffa6d-a179-41bf-961b-4de988434f3e-secret-volume\") pod \"6b0ffa6d-a179-41bf-961b-4de988434f3e\" (UID: \"6b0ffa6d-a179-41bf-961b-4de988434f3e\") " Nov 30 09:30:03 crc kubenswrapper[4941]: I1130 09:30:03.848001 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b0ffa6d-a179-41bf-961b-4de988434f3e-config-volume\") pod \"6b0ffa6d-a179-41bf-961b-4de988434f3e\" (UID: \"6b0ffa6d-a179-41bf-961b-4de988434f3e\") " Nov 30 09:30:03 crc kubenswrapper[4941]: I1130 09:30:03.849847 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b0ffa6d-a179-41bf-961b-4de988434f3e-config-volume" (OuterVolumeSpecName: "config-volume") pod "6b0ffa6d-a179-41bf-961b-4de988434f3e" (UID: "6b0ffa6d-a179-41bf-961b-4de988434f3e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 30 09:30:03 crc kubenswrapper[4941]: I1130 09:30:03.859306 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b0ffa6d-a179-41bf-961b-4de988434f3e-kube-api-access-k2smk" (OuterVolumeSpecName: "kube-api-access-k2smk") pod "6b0ffa6d-a179-41bf-961b-4de988434f3e" (UID: "6b0ffa6d-a179-41bf-961b-4de988434f3e"). InnerVolumeSpecName "kube-api-access-k2smk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:30:03 crc kubenswrapper[4941]: I1130 09:30:03.869645 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b0ffa6d-a179-41bf-961b-4de988434f3e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6b0ffa6d-a179-41bf-961b-4de988434f3e" (UID: "6b0ffa6d-a179-41bf-961b-4de988434f3e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 30 09:30:03 crc kubenswrapper[4941]: I1130 09:30:03.950647 4941 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6b0ffa6d-a179-41bf-961b-4de988434f3e-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 30 09:30:03 crc kubenswrapper[4941]: I1130 09:30:03.950690 4941 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6b0ffa6d-a179-41bf-961b-4de988434f3e-config-volume\") on node \"crc\" DevicePath \"\"" Nov 30 09:30:03 crc kubenswrapper[4941]: I1130 09:30:03.950701 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2smk\" (UniqueName: \"kubernetes.io/projected/6b0ffa6d-a179-41bf-961b-4de988434f3e-kube-api-access-k2smk\") on node \"crc\" DevicePath \"\"" Nov 30 09:30:04 crc kubenswrapper[4941]: I1130 09:30:04.254070 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" event={"ID":"6b0ffa6d-a179-41bf-961b-4de988434f3e","Type":"ContainerDied","Data":"87b0d93a10fd33e3c1222e9e7b4bc3ab5d20fbd9841f95815390678256d3fc57"} Nov 30 09:30:04 crc kubenswrapper[4941]: I1130 09:30:04.254107 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29408250-dkrtt" Nov 30 09:30:04 crc kubenswrapper[4941]: I1130 09:30:04.254125 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87b0d93a10fd33e3c1222e9e7b4bc3ab5d20fbd9841f95815390678256d3fc57" Nov 30 09:30:04 crc kubenswrapper[4941]: I1130 09:30:04.845415 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5"] Nov 30 09:30:04 crc kubenswrapper[4941]: I1130 09:30:04.860603 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29408205-rbdm5"] Nov 30 09:30:05 crc kubenswrapper[4941]: I1130 09:30:05.534573 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="152a5a57-c99f-42f3-8050-101bcfc1f2ae" path="/var/lib/kubelet/pods/152a5a57-c99f-42f3-8050-101bcfc1f2ae/volumes" Nov 30 09:30:15 crc kubenswrapper[4941]: I1130 09:30:15.521851 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" Nov 30 09:30:15 crc kubenswrapper[4941]: E1130 09:30:15.522724 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:30:28 crc kubenswrapper[4941]: I1130 09:30:28.523095 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" Nov 30 09:30:28 crc kubenswrapper[4941]: E1130 09:30:28.523987 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:30:39 crc kubenswrapper[4941]: I1130 09:30:39.530336 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" Nov 30 09:30:39 crc kubenswrapper[4941]: E1130 09:30:39.531275 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:30:50 crc kubenswrapper[4941]: I1130 09:30:50.522940 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" Nov 30 09:30:50 crc kubenswrapper[4941]: E1130 09:30:50.524010 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:30:54 crc kubenswrapper[4941]: I1130 09:30:54.365732 4941 scope.go:117] "RemoveContainer" containerID="293cfdc2f87f1cd001abeb0f1cdf1141a38b18bc69033b5ca8362178b93c3b55" Nov 30 09:31:01 crc kubenswrapper[4941]: I1130 09:31:01.522031 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" Nov 30 09:31:01 crc kubenswrapper[4941]: E1130 09:31:01.522753 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:31:16 crc kubenswrapper[4941]: I1130 09:31:16.523117 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" Nov 30 09:31:16 crc kubenswrapper[4941]: E1130 09:31:16.524108 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:31:31 crc kubenswrapper[4941]: I1130 09:31:31.522210 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" Nov 30 09:31:31 crc kubenswrapper[4941]: E1130 09:31:31.525154 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:31:43 crc kubenswrapper[4941]: I1130 09:31:43.522841 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5" Nov 30 09:31:44 crc kubenswrapper[4941]: I1130 09:31:44.372516 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"753d67d459852b50e5ca02ffb3cea393cf13d47c8faaf0defaa8dd4fdd572223"} Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.061606 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tv7xs"] Nov 30 09:31:49 crc kubenswrapper[4941]: E1130 09:31:49.063421 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b0ffa6d-a179-41bf-961b-4de988434f3e" containerName="collect-profiles" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.063441 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b0ffa6d-a179-41bf-961b-4de988434f3e" containerName="collect-profiles" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.063756 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b0ffa6d-a179-41bf-961b-4de988434f3e" containerName="collect-profiles" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.065914 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.085724 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tv7xs"] Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.173829 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1755faa-6bb0-43a7-ae94-3ea1343a3733-catalog-content\") pod \"certified-operators-tv7xs\" (UID: \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\") " pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.173979 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1755faa-6bb0-43a7-ae94-3ea1343a3733-utilities\") pod \"certified-operators-tv7xs\" (UID: \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\") " pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.174009 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4f864\" (UniqueName: \"kubernetes.io/projected/f1755faa-6bb0-43a7-ae94-3ea1343a3733-kube-api-access-4f864\") pod \"certified-operators-tv7xs\" (UID: \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\") " pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.276569 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1755faa-6bb0-43a7-ae94-3ea1343a3733-catalog-content\") pod \"certified-operators-tv7xs\" (UID: \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\") " pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.276989 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1755faa-6bb0-43a7-ae94-3ea1343a3733-utilities\") pod \"certified-operators-tv7xs\" (UID: \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\") " pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.277022 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4f864\" (UniqueName: \"kubernetes.io/projected/f1755faa-6bb0-43a7-ae94-3ea1343a3733-kube-api-access-4f864\") pod \"certified-operators-tv7xs\" (UID: \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\") " pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.277830 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1755faa-6bb0-43a7-ae94-3ea1343a3733-catalog-content\") pod \"certified-operators-tv7xs\" (UID: \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\") " pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.278048 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1755faa-6bb0-43a7-ae94-3ea1343a3733-utilities\") pod \"certified-operators-tv7xs\" (UID: \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\") " pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.296828 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4f864\" (UniqueName: \"kubernetes.io/projected/f1755faa-6bb0-43a7-ae94-3ea1343a3733-kube-api-access-4f864\") pod \"certified-operators-tv7xs\" (UID: \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\") " pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:49 crc kubenswrapper[4941]: I1130 09:31:49.424086 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:50 crc kubenswrapper[4941]: I1130 09:31:50.056635 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tv7xs"] Nov 30 09:31:50 crc kubenswrapper[4941]: I1130 09:31:50.457409 4941 generic.go:334] "Generic (PLEG): container finished" podID="f1755faa-6bb0-43a7-ae94-3ea1343a3733" containerID="23662e544f72c817dd6661436042219081d51fb884a4de9e7fe8ad00afd7bc24" exitCode=0 Nov 30 09:31:50 crc kubenswrapper[4941]: I1130 09:31:50.457465 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tv7xs" event={"ID":"f1755faa-6bb0-43a7-ae94-3ea1343a3733","Type":"ContainerDied","Data":"23662e544f72c817dd6661436042219081d51fb884a4de9e7fe8ad00afd7bc24"} Nov 30 09:31:50 crc kubenswrapper[4941]: I1130 09:31:50.457495 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tv7xs" event={"ID":"f1755faa-6bb0-43a7-ae94-3ea1343a3733","Type":"ContainerStarted","Data":"5c5a89a6bf85e1fdbec12c2ca8d40666eb49e9a0b5d67632a44d6c7a7206d242"} Nov 30 09:31:50 crc kubenswrapper[4941]: I1130 09:31:50.460193 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 09:31:52 crc kubenswrapper[4941]: I1130 09:31:52.481276 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tv7xs" event={"ID":"f1755faa-6bb0-43a7-ae94-3ea1343a3733","Type":"ContainerStarted","Data":"2e7502d37bdfe21942c9430d49158d6cd77543e00daacc17050d657e3e65fbfb"} Nov 30 09:31:53 crc kubenswrapper[4941]: I1130 09:31:53.531172 4941 generic.go:334] "Generic (PLEG): container finished" podID="f1755faa-6bb0-43a7-ae94-3ea1343a3733" containerID="2e7502d37bdfe21942c9430d49158d6cd77543e00daacc17050d657e3e65fbfb" exitCode=0 Nov 30 09:31:53 crc kubenswrapper[4941]: I1130 09:31:53.541142 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tv7xs" event={"ID":"f1755faa-6bb0-43a7-ae94-3ea1343a3733","Type":"ContainerDied","Data":"2e7502d37bdfe21942c9430d49158d6cd77543e00daacc17050d657e3e65fbfb"} Nov 30 09:31:54 crc kubenswrapper[4941]: I1130 09:31:54.546109 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tv7xs" event={"ID":"f1755faa-6bb0-43a7-ae94-3ea1343a3733","Type":"ContainerStarted","Data":"966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0"} Nov 30 09:31:54 crc kubenswrapper[4941]: I1130 09:31:54.578968 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tv7xs" podStartSLOduration=2.108457123 podStartE2EDuration="5.578948483s" podCreationTimestamp="2025-11-30 09:31:49 +0000 UTC" firstStartedPulling="2025-11-30 09:31:50.45996125 +0000 UTC m=+9931.228132859" lastFinishedPulling="2025-11-30 09:31:53.93045261 +0000 UTC m=+9934.698624219" observedRunningTime="2025-11-30 09:31:54.566794776 +0000 UTC m=+9935.334966385" watchObservedRunningTime="2025-11-30 09:31:54.578948483 +0000 UTC m=+9935.347120092" Nov 30 09:31:59 crc kubenswrapper[4941]: I1130 09:31:59.424364 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:59 crc kubenswrapper[4941]: I1130 09:31:59.426146 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tv7xs" 
Nov 30 09:31:59 crc kubenswrapper[4941]: I1130 09:31:59.485679 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:59 crc kubenswrapper[4941]: I1130 09:31:59.651031 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:31:59 crc kubenswrapper[4941]: I1130 09:31:59.731320 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tv7xs"] Nov 30 09:32:01 crc kubenswrapper[4941]: I1130 09:32:01.618073 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tv7xs" podUID="f1755faa-6bb0-43a7-ae94-3ea1343a3733" containerName="registry-server" containerID="cri-o://966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0" gracePeriod=2 Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.189557 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.321187 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1755faa-6bb0-43a7-ae94-3ea1343a3733-utilities\") pod \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\" (UID: \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\") " Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.321547 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1755faa-6bb0-43a7-ae94-3ea1343a3733-catalog-content\") pod \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\" (UID: \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\") " Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.321758 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4f864\" (UniqueName: \"kubernetes.io/projected/f1755faa-6bb0-43a7-ae94-3ea1343a3733-kube-api-access-4f864\") pod \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\" (UID: \"f1755faa-6bb0-43a7-ae94-3ea1343a3733\") " Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.322345 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1755faa-6bb0-43a7-ae94-3ea1343a3733-utilities" (OuterVolumeSpecName: "utilities") pod "f1755faa-6bb0-43a7-ae94-3ea1343a3733" (UID: "f1755faa-6bb0-43a7-ae94-3ea1343a3733"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.342933 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1755faa-6bb0-43a7-ae94-3ea1343a3733-kube-api-access-4f864" (OuterVolumeSpecName: "kube-api-access-4f864") pod "f1755faa-6bb0-43a7-ae94-3ea1343a3733" (UID: "f1755faa-6bb0-43a7-ae94-3ea1343a3733"). InnerVolumeSpecName "kube-api-access-4f864". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.367689 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1755faa-6bb0-43a7-ae94-3ea1343a3733-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1755faa-6bb0-43a7-ae94-3ea1343a3733" (UID: "f1755faa-6bb0-43a7-ae94-3ea1343a3733"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.424159 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1755faa-6bb0-43a7-ae94-3ea1343a3733-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.424193 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1755faa-6bb0-43a7-ae94-3ea1343a3733-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.424208 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4f864\" (UniqueName: \"kubernetes.io/projected/f1755faa-6bb0-43a7-ae94-3ea1343a3733-kube-api-access-4f864\") on node \"crc\" DevicePath \"\"" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.632895 4941 generic.go:334] "Generic (PLEG): container finished" podID="f1755faa-6bb0-43a7-ae94-3ea1343a3733" containerID="966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0" exitCode=0 Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.632952 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tv7xs" event={"ID":"f1755faa-6bb0-43a7-ae94-3ea1343a3733","Type":"ContainerDied","Data":"966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0"} Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.632984 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tv7xs" event={"ID":"f1755faa-6bb0-43a7-ae94-3ea1343a3733","Type":"ContainerDied","Data":"5c5a89a6bf85e1fdbec12c2ca8d40666eb49e9a0b5d67632a44d6c7a7206d242"} Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.633003 4941 scope.go:117] "RemoveContainer" containerID="966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.633254 4941 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tv7xs" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.654542 4941 scope.go:117] "RemoveContainer" containerID="2e7502d37bdfe21942c9430d49158d6cd77543e00daacc17050d657e3e65fbfb" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.674044 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tv7xs"] Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.684262 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tv7xs"] Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.705738 4941 scope.go:117] "RemoveContainer" containerID="23662e544f72c817dd6661436042219081d51fb884a4de9e7fe8ad00afd7bc24" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.736572 4941 scope.go:117] "RemoveContainer" containerID="966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0" Nov 30 09:32:02 crc kubenswrapper[4941]: E1130 09:32:02.743907 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0\": container with ID starting with 966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0 not found: ID does not exist" containerID="966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.743965 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0"} err="failed to get container status \"966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0\": rpc error: code = NotFound desc = could not find container \"966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0\": container with ID starting with 966a0361ea9c14e2c3406eaa6b792baa4562594f785f9f8a542d5b90cdeeb8d0 not found: ID does not exist" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.744003 4941 scope.go:117] "RemoveContainer" containerID="2e7502d37bdfe21942c9430d49158d6cd77543e00daacc17050d657e3e65fbfb" Nov 30 09:32:02 crc kubenswrapper[4941]: E1130 09:32:02.744481 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e7502d37bdfe21942c9430d49158d6cd77543e00daacc17050d657e3e65fbfb\": container with ID starting with 2e7502d37bdfe21942c9430d49158d6cd77543e00daacc17050d657e3e65fbfb not found: ID does not exist" containerID="2e7502d37bdfe21942c9430d49158d6cd77543e00daacc17050d657e3e65fbfb" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.744520 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e7502d37bdfe21942c9430d49158d6cd77543e00daacc17050d657e3e65fbfb"} err="failed to get container status \"2e7502d37bdfe21942c9430d49158d6cd77543e00daacc17050d657e3e65fbfb\": rpc error: code = NotFound desc = could not find container \"2e7502d37bdfe21942c9430d49158d6cd77543e00daacc17050d657e3e65fbfb\": container with ID starting with 2e7502d37bdfe21942c9430d49158d6cd77543e00daacc17050d657e3e65fbfb not found: ID does not exist" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.744535 4941 scope.go:117] "RemoveContainer" containerID="23662e544f72c817dd6661436042219081d51fb884a4de9e7fe8ad00afd7bc24" Nov 30 09:32:02 crc kubenswrapper[4941]: E1130 09:32:02.744877 4941 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"23662e544f72c817dd6661436042219081d51fb884a4de9e7fe8ad00afd7bc24\": container with ID starting with 23662e544f72c817dd6661436042219081d51fb884a4de9e7fe8ad00afd7bc24 not found: ID does not exist" containerID="23662e544f72c817dd6661436042219081d51fb884a4de9e7fe8ad00afd7bc24" Nov 30 09:32:02 crc kubenswrapper[4941]: I1130 09:32:02.744917 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23662e544f72c817dd6661436042219081d51fb884a4de9e7fe8ad00afd7bc24"} err="failed to get container status \"23662e544f72c817dd6661436042219081d51fb884a4de9e7fe8ad00afd7bc24\": rpc error: code = NotFound desc = could not find container \"23662e544f72c817dd6661436042219081d51fb884a4de9e7fe8ad00afd7bc24\": container with ID starting with 23662e544f72c817dd6661436042219081d51fb884a4de9e7fe8ad00afd7bc24 not found: ID does not exist" Nov 30 09:32:03 crc kubenswrapper[4941]: I1130 09:32:03.540842 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1755faa-6bb0-43a7-ae94-3ea1343a3733" path="/var/lib/kubelet/pods/f1755faa-6bb0-43a7-ae94-3ea1343a3733/volumes" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.070209 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gnx8c"] Nov 30 09:32:17 crc kubenswrapper[4941]: E1130 09:32:17.071383 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1755faa-6bb0-43a7-ae94-3ea1343a3733" containerName="registry-server" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.071400 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1755faa-6bb0-43a7-ae94-3ea1343a3733" containerName="registry-server" Nov 30 09:32:17 crc kubenswrapper[4941]: E1130 09:32:17.071410 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1755faa-6bb0-43a7-ae94-3ea1343a3733" containerName="extract-utilities" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.071417 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1755faa-6bb0-43a7-ae94-3ea1343a3733" containerName="extract-utilities" Nov 30 09:32:17 crc kubenswrapper[4941]: E1130 09:32:17.071429 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1755faa-6bb0-43a7-ae94-3ea1343a3733" containerName="extract-content" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.071438 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1755faa-6bb0-43a7-ae94-3ea1343a3733" containerName="extract-content" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.071649 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1755faa-6bb0-43a7-ae94-3ea1343a3733" containerName="registry-server" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.074305 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.086289 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gnx8c"] Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.176923 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6rg9\" (UniqueName: \"kubernetes.io/projected/1abd43b7-fa08-4bba-877e-94b77072a6da-kube-api-access-v6rg9\") pod \"redhat-marketplace-gnx8c\" (UID: \"1abd43b7-fa08-4bba-877e-94b77072a6da\") " pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.177053 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1abd43b7-fa08-4bba-877e-94b77072a6da-utilities\") pod \"redhat-marketplace-gnx8c\" (UID: \"1abd43b7-fa08-4bba-877e-94b77072a6da\") " pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.177379 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1abd43b7-fa08-4bba-877e-94b77072a6da-catalog-content\") pod \"redhat-marketplace-gnx8c\" (UID: \"1abd43b7-fa08-4bba-877e-94b77072a6da\") " pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.280117 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1abd43b7-fa08-4bba-877e-94b77072a6da-catalog-content\") pod \"redhat-marketplace-gnx8c\" (UID: \"1abd43b7-fa08-4bba-877e-94b77072a6da\") " pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.280282 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6rg9\" (UniqueName: \"kubernetes.io/projected/1abd43b7-fa08-4bba-877e-94b77072a6da-kube-api-access-v6rg9\") pod \"redhat-marketplace-gnx8c\" (UID: \"1abd43b7-fa08-4bba-877e-94b77072a6da\") " pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.280347 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1abd43b7-fa08-4bba-877e-94b77072a6da-utilities\") pod \"redhat-marketplace-gnx8c\" (UID: \"1abd43b7-fa08-4bba-877e-94b77072a6da\") " pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.280757 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1abd43b7-fa08-4bba-877e-94b77072a6da-catalog-content\") pod \"redhat-marketplace-gnx8c\" (UID: \"1abd43b7-fa08-4bba-877e-94b77072a6da\") " pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.280857 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1abd43b7-fa08-4bba-877e-94b77072a6da-utilities\") pod \"redhat-marketplace-gnx8c\" (UID: \"1abd43b7-fa08-4bba-877e-94b77072a6da\") " pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.798527 4941 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-v6rg9\" (UniqueName: \"kubernetes.io/projected/1abd43b7-fa08-4bba-877e-94b77072a6da-kube-api-access-v6rg9\") pod \"redhat-marketplace-gnx8c\" (UID: \"1abd43b7-fa08-4bba-877e-94b77072a6da\") " pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:17 crc kubenswrapper[4941]: I1130 09:32:17.997025 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:18 crc kubenswrapper[4941]: I1130 09:32:18.468576 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gnx8c"] Nov 30 09:32:18 crc kubenswrapper[4941]: I1130 09:32:18.822663 4941 generic.go:334] "Generic (PLEG): container finished" podID="1abd43b7-fa08-4bba-877e-94b77072a6da" containerID="d7b7bbc855208c7c7609673d43dd309318d0b700bd475f7f09bc8069a391e894" exitCode=0 Nov 30 09:32:18 crc kubenswrapper[4941]: I1130 09:32:18.822803 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gnx8c" event={"ID":"1abd43b7-fa08-4bba-877e-94b77072a6da","Type":"ContainerDied","Data":"d7b7bbc855208c7c7609673d43dd309318d0b700bd475f7f09bc8069a391e894"} Nov 30 09:32:18 crc kubenswrapper[4941]: I1130 09:32:18.823041 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gnx8c" event={"ID":"1abd43b7-fa08-4bba-877e-94b77072a6da","Type":"ContainerStarted","Data":"9eee44ef90f7756289db0223240dd38fd62cb102ee1de58787365e634c3454d0"} Nov 30 09:32:19 crc kubenswrapper[4941]: I1130 09:32:19.839801 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gnx8c" event={"ID":"1abd43b7-fa08-4bba-877e-94b77072a6da","Type":"ContainerStarted","Data":"83f8ae2a47c63632d60f2bfee9b5fdd947b100546226e26f5cbc950623d61111"} Nov 30 09:32:20 crc kubenswrapper[4941]: I1130 09:32:20.852616 4941 generic.go:334] "Generic (PLEG): container finished" podID="1abd43b7-fa08-4bba-877e-94b77072a6da" containerID="83f8ae2a47c63632d60f2bfee9b5fdd947b100546226e26f5cbc950623d61111" exitCode=0 Nov 30 09:32:20 crc kubenswrapper[4941]: I1130 09:32:20.852730 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gnx8c" event={"ID":"1abd43b7-fa08-4bba-877e-94b77072a6da","Type":"ContainerDied","Data":"83f8ae2a47c63632d60f2bfee9b5fdd947b100546226e26f5cbc950623d61111"} Nov 30 09:32:21 crc kubenswrapper[4941]: I1130 09:32:21.865735 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gnx8c" event={"ID":"1abd43b7-fa08-4bba-877e-94b77072a6da","Type":"ContainerStarted","Data":"d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e"} Nov 30 09:32:21 crc kubenswrapper[4941]: I1130 09:32:21.899361 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gnx8c" podStartSLOduration=2.314909642 podStartE2EDuration="4.899327629s" podCreationTimestamp="2025-11-30 09:32:17 +0000 UTC" firstStartedPulling="2025-11-30 09:32:18.82468865 +0000 UTC m=+9959.592860259" lastFinishedPulling="2025-11-30 09:32:21.409106637 +0000 UTC m=+9962.177278246" observedRunningTime="2025-11-30 09:32:21.885579763 +0000 UTC m=+9962.653751372" watchObservedRunningTime="2025-11-30 09:32:21.899327629 +0000 UTC m=+9962.667499238" Nov 30 09:32:27 crc kubenswrapper[4941]: I1130 09:32:27.997854 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:27 crc kubenswrapper[4941]: I1130 09:32:27.998529 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:28 crc kubenswrapper[4941]: I1130 09:32:28.074374 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:28 crc kubenswrapper[4941]: I1130 09:32:28.995431 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:31 crc kubenswrapper[4941]: I1130 09:32:31.664420 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gnx8c"] Nov 30 09:32:31 crc kubenswrapper[4941]: I1130 09:32:31.982850 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gnx8c" podUID="1abd43b7-fa08-4bba-877e-94b77072a6da" containerName="registry-server" containerID="cri-o://d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e" gracePeriod=2 Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.593186 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.707289 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1abd43b7-fa08-4bba-877e-94b77072a6da-utilities\") pod \"1abd43b7-fa08-4bba-877e-94b77072a6da\" (UID: \"1abd43b7-fa08-4bba-877e-94b77072a6da\") " Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.707534 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6rg9\" (UniqueName: \"kubernetes.io/projected/1abd43b7-fa08-4bba-877e-94b77072a6da-kube-api-access-v6rg9\") pod \"1abd43b7-fa08-4bba-877e-94b77072a6da\" (UID: \"1abd43b7-fa08-4bba-877e-94b77072a6da\") " Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.707640 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1abd43b7-fa08-4bba-877e-94b77072a6da-catalog-content\") pod \"1abd43b7-fa08-4bba-877e-94b77072a6da\" (UID: \"1abd43b7-fa08-4bba-877e-94b77072a6da\") " Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.708317 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1abd43b7-fa08-4bba-877e-94b77072a6da-utilities" (OuterVolumeSpecName: "utilities") pod "1abd43b7-fa08-4bba-877e-94b77072a6da" (UID: "1abd43b7-fa08-4bba-877e-94b77072a6da"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.710030 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1abd43b7-fa08-4bba-877e-94b77072a6da-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.729427 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1abd43b7-fa08-4bba-877e-94b77072a6da-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1abd43b7-fa08-4bba-877e-94b77072a6da" (UID: "1abd43b7-fa08-4bba-877e-94b77072a6da"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.812035 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1abd43b7-fa08-4bba-877e-94b77072a6da-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.995479 4941 generic.go:334] "Generic (PLEG): container finished" podID="1abd43b7-fa08-4bba-877e-94b77072a6da" containerID="d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e" exitCode=0 Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.995618 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gnx8c" event={"ID":"1abd43b7-fa08-4bba-877e-94b77072a6da","Type":"ContainerDied","Data":"d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e"} Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.995931 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gnx8c" event={"ID":"1abd43b7-fa08-4bba-877e-94b77072a6da","Type":"ContainerDied","Data":"9eee44ef90f7756289db0223240dd38fd62cb102ee1de58787365e634c3454d0"} Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.995977 4941 scope.go:117] "RemoveContainer" containerID="d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e" Nov 30 09:32:32 crc kubenswrapper[4941]: I1130 09:32:32.995644 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gnx8c" Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.022122 4941 scope.go:117] "RemoveContainer" containerID="83f8ae2a47c63632d60f2bfee9b5fdd947b100546226e26f5cbc950623d61111" Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.188786 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1abd43b7-fa08-4bba-877e-94b77072a6da-kube-api-access-v6rg9" (OuterVolumeSpecName: "kube-api-access-v6rg9") pod "1abd43b7-fa08-4bba-877e-94b77072a6da" (UID: "1abd43b7-fa08-4bba-877e-94b77072a6da"). InnerVolumeSpecName "kube-api-access-v6rg9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.208014 4941 scope.go:117] "RemoveContainer" containerID="d7b7bbc855208c7c7609673d43dd309318d0b700bd475f7f09bc8069a391e894" Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.223830 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6rg9\" (UniqueName: \"kubernetes.io/projected/1abd43b7-fa08-4bba-877e-94b77072a6da-kube-api-access-v6rg9\") on node \"crc\" DevicePath \"\"" Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.301995 4941 scope.go:117] "RemoveContainer" containerID="d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e" Nov 30 09:32:33 crc kubenswrapper[4941]: E1130 09:32:33.302535 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e\": container with ID starting with d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e not found: ID does not exist" containerID="d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e" Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.302580 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e"} err="failed to get container status \"d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e\": rpc error: code = NotFound desc = could not find container \"d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e\": container with ID starting with d8e13f1970e223b6a70aa23bed00fb9a5edb7f0082464572d64be0fff882420e not found: ID does not exist" Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.302613 4941 scope.go:117] "RemoveContainer" containerID="83f8ae2a47c63632d60f2bfee9b5fdd947b100546226e26f5cbc950623d61111" Nov 30 09:32:33 crc kubenswrapper[4941]: E1130 09:32:33.303100 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83f8ae2a47c63632d60f2bfee9b5fdd947b100546226e26f5cbc950623d61111\": container with ID starting with 83f8ae2a47c63632d60f2bfee9b5fdd947b100546226e26f5cbc950623d61111 not found: ID does not exist" containerID="83f8ae2a47c63632d60f2bfee9b5fdd947b100546226e26f5cbc950623d61111" Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.303135 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83f8ae2a47c63632d60f2bfee9b5fdd947b100546226e26f5cbc950623d61111"} err="failed to get container status \"83f8ae2a47c63632d60f2bfee9b5fdd947b100546226e26f5cbc950623d61111\": rpc error: code = NotFound desc = could not find container \"83f8ae2a47c63632d60f2bfee9b5fdd947b100546226e26f5cbc950623d61111\": container with ID starting with 83f8ae2a47c63632d60f2bfee9b5fdd947b100546226e26f5cbc950623d61111 not found: ID does not exist" Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.303154 4941 scope.go:117] "RemoveContainer" containerID="d7b7bbc855208c7c7609673d43dd309318d0b700bd475f7f09bc8069a391e894" Nov 30 09:32:33 crc kubenswrapper[4941]: E1130 09:32:33.303633 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7b7bbc855208c7c7609673d43dd309318d0b700bd475f7f09bc8069a391e894\": container with ID starting with d7b7bbc855208c7c7609673d43dd309318d0b700bd475f7f09bc8069a391e894 not found: ID does not 
exist" containerID="d7b7bbc855208c7c7609673d43dd309318d0b700bd475f7f09bc8069a391e894" Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.303695 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7b7bbc855208c7c7609673d43dd309318d0b700bd475f7f09bc8069a391e894"} err="failed to get container status \"d7b7bbc855208c7c7609673d43dd309318d0b700bd475f7f09bc8069a391e894\": rpc error: code = NotFound desc = could not find container \"d7b7bbc855208c7c7609673d43dd309318d0b700bd475f7f09bc8069a391e894\": container with ID starting with d7b7bbc855208c7c7609673d43dd309318d0b700bd475f7f09bc8069a391e894 not found: ID does not exist" Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.356967 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gnx8c"] Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.370145 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gnx8c"] Nov 30 09:32:33 crc kubenswrapper[4941]: I1130 09:32:33.532943 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1abd43b7-fa08-4bba-877e-94b77072a6da" path="/var/lib/kubelet/pods/1abd43b7-fa08-4bba-877e-94b77072a6da/volumes" Nov 30 09:32:53 crc kubenswrapper[4941]: I1130 09:32:53.306125 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e/init-config-reloader/0.log" Nov 30 09:32:53 crc kubenswrapper[4941]: I1130 09:32:53.518730 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e/alertmanager/0.log" Nov 30 09:32:53 crc kubenswrapper[4941]: I1130 09:32:53.543222 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e/init-config-reloader/0.log" Nov 30 09:32:53 crc kubenswrapper[4941]: I1130 09:32:53.614131 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_d8c3c419-4754-4ee6-a7d4-3ea9c60fea9e/config-reloader/0.log" Nov 30 09:32:53 crc kubenswrapper[4941]: I1130 09:32:53.836787 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_e0b4d9d1-d4d1-4136-b815-b582fbda8e7e/aodh-api/0.log" Nov 30 09:32:53 crc kubenswrapper[4941]: I1130 09:32:53.871069 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_e0b4d9d1-d4d1-4136-b815-b582fbda8e7e/aodh-evaluator/0.log" Nov 30 09:32:53 crc kubenswrapper[4941]: I1130 09:32:53.903959 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_e0b4d9d1-d4d1-4136-b815-b582fbda8e7e/aodh-listener/0.log" Nov 30 09:32:54 crc kubenswrapper[4941]: I1130 09:32:54.029845 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_e0b4d9d1-d4d1-4136-b815-b582fbda8e7e/aodh-notifier/0.log" Nov 30 09:32:54 crc kubenswrapper[4941]: I1130 09:32:54.116515 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-548cc55d7d-csbl8_9093646c-4fff-4bb3-8b5e-c040a3da6cd7/barbican-api/0.log" Nov 30 09:32:54 crc kubenswrapper[4941]: I1130 09:32:54.212033 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-548cc55d7d-csbl8_9093646c-4fff-4bb3-8b5e-c040a3da6cd7/barbican-api-log/0.log" Nov 30 09:32:54 crc kubenswrapper[4941]: I1130 09:32:54.387703 4941 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-keystone-listener-756cddb678-ld46p_a1602b04-2515-4374-aa8a-04802f9cd569/barbican-keystone-listener/0.log" Nov 30 09:32:54 crc kubenswrapper[4941]: I1130 09:32:54.759258 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-756cddb678-ld46p_a1602b04-2515-4374-aa8a-04802f9cd569/barbican-keystone-listener-log/0.log" Nov 30 09:32:54 crc kubenswrapper[4941]: I1130 09:32:54.762589 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7f4878bc6c-wfqmk_5265e97f-b224-4441-99fe-716a0df577f6/barbican-worker-log/0.log" Nov 30 09:32:54 crc kubenswrapper[4941]: I1130 09:32:54.765279 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7f4878bc6c-wfqmk_5265e97f-b224-4441-99fe-716a0df577f6/barbican-worker/0.log" Nov 30 09:32:55 crc kubenswrapper[4941]: I1130 09:32:55.029676 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-qsd9n_3215bae2-d62e-4687-bd5b-8d51625ba47c/bootstrap-openstack-openstack-cell1/0.log" Nov 30 09:32:55 crc kubenswrapper[4941]: I1130 09:32:55.080162 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fe2c62f2-5689-47c8-bf38-dc1ec1547e8e/ceilometer-central-agent/0.log" Nov 30 09:32:55 crc kubenswrapper[4941]: I1130 09:32:55.212634 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fe2c62f2-5689-47c8-bf38-dc1ec1547e8e/ceilometer-notification-agent/0.log" Nov 30 09:32:55 crc kubenswrapper[4941]: I1130 09:32:55.295316 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fe2c62f2-5689-47c8-bf38-dc1ec1547e8e/proxy-httpd/0.log" Nov 30 09:32:55 crc kubenswrapper[4941]: I1130 09:32:55.337270 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fe2c62f2-5689-47c8-bf38-dc1ec1547e8e/sg-core/0.log" Nov 30 09:32:55 crc kubenswrapper[4941]: I1130 09:32:55.517064 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-openstack-openstack-cell1-kjp4b_2bb75047-8a2a-4187-a104-95b1a83c4b02/ceph-client-openstack-openstack-cell1/0.log" Nov 30 09:32:55 crc kubenswrapper[4941]: I1130 09:32:55.629230 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f9d9e3e8-f265-4ef9-9f1f-9e66b80be876/cinder-api/0.log" Nov 30 09:32:55 crc kubenswrapper[4941]: I1130 09:32:55.743470 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_f9d9e3e8-f265-4ef9-9f1f-9e66b80be876/cinder-api-log/0.log" Nov 30 09:32:56 crc kubenswrapper[4941]: I1130 09:32:56.019896 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7/cinder-backup/0.log" Nov 30 09:32:56 crc kubenswrapper[4941]: I1130 09:32:56.060716 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_4b1e8173-fb77-47d7-8a0f-4d7fc86cd9f7/probe/0.log" Nov 30 09:32:56 crc kubenswrapper[4941]: I1130 09:32:56.129989 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_99967ec0-73ff-4130-9f6f-2287978e418c/cinder-scheduler/0.log" Nov 30 09:32:56 crc kubenswrapper[4941]: I1130 09:32:56.402303 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_f52fb817-2c20-4482-a999-7780b6902951/cinder-volume/0.log" Nov 30 09:32:56 crc kubenswrapper[4941]: I1130 
09:32:56.404147 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_99967ec0-73ff-4130-9f6f-2287978e418c/probe/0.log" Nov 30 09:32:56 crc kubenswrapper[4941]: I1130 09:32:56.445726 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_f52fb817-2c20-4482-a999-7780b6902951/probe/0.log" Nov 30 09:32:56 crc kubenswrapper[4941]: I1130 09:32:56.653046 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-dc8gw_e432e891-b748-4cee-a941-553e6f7d6140/configure-network-openstack-openstack-cell1/0.log" Nov 30 09:32:56 crc kubenswrapper[4941]: I1130 09:32:56.767990 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-5prpd_12f91628-4c27-4506-9398-777d63c07d0b/configure-os-openstack-openstack-cell1/0.log" Nov 30 09:32:56 crc kubenswrapper[4941]: I1130 09:32:56.896113 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79ff8bb889-l8cq5_b4296723-5d2c-4376-927c-4fb06c557533/init/0.log" Nov 30 09:32:57 crc kubenswrapper[4941]: I1130 09:32:57.232636 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79ff8bb889-l8cq5_b4296723-5d2c-4376-927c-4fb06c557533/dnsmasq-dns/0.log" Nov 30 09:32:57 crc kubenswrapper[4941]: I1130 09:32:57.250491 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79ff8bb889-l8cq5_b4296723-5d2c-4376-927c-4fb06c557533/init/0.log" Nov 30 09:32:57 crc kubenswrapper[4941]: I1130 09:32:57.252706 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-mmdxs_00aafada-6a3d-4ea6-8e0d-e7090d7acd63/download-cache-openstack-openstack-cell1/0.log" Nov 30 09:32:57 crc kubenswrapper[4941]: I1130 09:32:57.767927 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_a779aa82-744f-4b58-b795-8a04ae715a62/glance-httpd/0.log" Nov 30 09:32:57 crc kubenswrapper[4941]: I1130 09:32:57.802366 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_a779aa82-744f-4b58-b795-8a04ae715a62/glance-log/0.log" Nov 30 09:32:57 crc kubenswrapper[4941]: I1130 09:32:57.889072 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_307bea18-b42b-4fb3-a880-90208b196d4c/glance-log/0.log" Nov 30 09:32:57 crc kubenswrapper[4941]: I1130 09:32:57.898516 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_307bea18-b42b-4fb3-a880-90208b196d4c/glance-httpd/0.log" Nov 30 09:32:58 crc kubenswrapper[4941]: I1130 09:32:58.139313 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-5668d7bff6-xcb8s_ae5e67e1-01bb-403d-ab7e-b21042b07f87/heat-api/0.log" Nov 30 09:32:58 crc kubenswrapper[4941]: I1130 09:32:58.288988 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-774d8c7f69-ws8bn_c9f0701b-867f-4c22-9d1b-e01a5644424b/heat-cfnapi/0.log" Nov 30 09:32:58 crc kubenswrapper[4941]: I1130 09:32:58.386325 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-6df4c5b7cd-hmfx6_2080cb1e-6630-4e86-9bfd-61ce1d7490fc/heat-engine/0.log" Nov 30 09:32:58 crc kubenswrapper[4941]: I1130 09:32:58.653589 4941 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_horizon-5cb476c99f-sf99n_24e1159e-4632-4a9e-b573-c0625eaf4a93/horizon/0.log" Nov 30 09:32:58 crc kubenswrapper[4941]: I1130 09:32:58.711596 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5cb476c99f-sf99n_24e1159e-4632-4a9e-b573-c0625eaf4a93/horizon-log/0.log" Nov 30 09:32:59 crc kubenswrapper[4941]: I1130 09:32:59.418580 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-ktf6t_a0a57fdf-dbf3-4dec-8079-ecfffe844fa3/install-certs-openstack-openstack-cell1/0.log" Nov 30 09:32:59 crc kubenswrapper[4941]: I1130 09:32:59.452957 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-qj956_716f9427-4add-453c-9d69-f40949a5ee12/install-os-openstack-openstack-cell1/0.log" Nov 30 09:32:59 crc kubenswrapper[4941]: I1130 09:32:59.741256 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29408221-spdm4_90448577-f8e3-4277-923b-50d69fc2273e/keystone-cron/0.log" Nov 30 09:32:59 crc kubenswrapper[4941]: I1130 09:32:59.904783 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_f664d4a3-d502-4925-aa66-755419694d2b/kube-state-metrics/0.log" Nov 30 09:32:59 crc kubenswrapper[4941]: I1130 09:32:59.967401 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-76dc6df48-fpk8r_9a6f721e-1e8e-4f88-a383-4d5bb29a4c27/keystone-api/0.log" Nov 30 09:33:00 crc kubenswrapper[4941]: I1130 09:33:00.072349 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-fmfhx_c6e65b7b-aabd-4898-b48f-90a421e836a5/libvirt-openstack-openstack-cell1/0.log" Nov 30 09:33:00 crc kubenswrapper[4941]: I1130 09:33:00.208127 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_33eeedb0-e517-4da2-a25e-0faa7c669fde/manila-api-log/0.log" Nov 30 09:33:00 crc kubenswrapper[4941]: I1130 09:33:00.401198 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_33eeedb0-e517-4da2-a25e-0faa7c669fde/manila-api/0.log" Nov 30 09:33:00 crc kubenswrapper[4941]: I1130 09:33:00.435832 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_272642ef-9daa-4958-9645-ac66bdd43cce/manila-scheduler/0.log" Nov 30 09:33:00 crc kubenswrapper[4941]: I1130 09:33:00.462178 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_272642ef-9daa-4958-9645-ac66bdd43cce/probe/0.log" Nov 30 09:33:00 crc kubenswrapper[4941]: I1130 09:33:00.688217 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_965722c8-5980-41b0-92e7-ebca41c408e2/probe/0.log" Nov 30 09:33:00 crc kubenswrapper[4941]: I1130 09:33:00.716166 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_965722c8-5980-41b0-92e7-ebca41c408e2/manila-share/0.log" Nov 30 09:33:01 crc kubenswrapper[4941]: I1130 09:33:01.196735 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-68c887db59-hwrwh_240bba4d-ff88-447f-8879-045591768175/neutron-httpd/0.log" Nov 30 09:33:01 crc kubenswrapper[4941]: I1130 09:33:01.623797 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-68c887db59-hwrwh_240bba4d-ff88-447f-8879-045591768175/neutron-api/0.log" Nov 30 09:33:01 crc kubenswrapper[4941]: I1130 09:33:01.869992 4941 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-7llxw_2cc0b49d-63a4-4376-87d3-9866a06212e7/neutron-dhcp-openstack-openstack-cell1/0.log" Nov 30 09:33:01 crc kubenswrapper[4941]: I1130 09:33:01.970632 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-467cs_984773df-2a8c-4db3-a97e-993d3e6985fc/neutron-metadata-openstack-openstack-cell1/0.log" Nov 30 09:33:02 crc kubenswrapper[4941]: I1130 09:33:02.224024 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-cvf6p_34bc5f99-c870-4bc0-9873-b87d4f3fa30a/neutron-sriov-openstack-openstack-cell1/0.log" Nov 30 09:33:02 crc kubenswrapper[4941]: I1130 09:33:02.412576 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d7bb713a-b0ae-4bed-82ce-f1fa4de9281e/nova-api-api/0.log" Nov 30 09:33:02 crc kubenswrapper[4941]: I1130 09:33:02.614235 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d7bb713a-b0ae-4bed-82ce-f1fa4de9281e/nova-api-log/0.log" Nov 30 09:33:02 crc kubenswrapper[4941]: I1130 09:33:02.735549 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_5606214f-0456-4a22-87dd-a3a6624afaf5/nova-cell0-conductor-conductor/0.log" Nov 30 09:33:02 crc kubenswrapper[4941]: I1130 09:33:02.915174 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_16a91c01-c6ad-40d3-8f79-9a19235f0964/nova-cell1-conductor-conductor/0.log" Nov 30 09:33:03 crc kubenswrapper[4941]: I1130 09:33:03.091526 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_4b88b054-0071-4404-8749-7348030d171b/nova-cell1-novncproxy-novncproxy/0.log" Nov 30 09:33:03 crc kubenswrapper[4941]: I1130 09:33:03.260229 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell9pxcf_db6bcef0-5db9-4007-a09c-1435e4d37b48/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log" Nov 30 09:33:03 crc kubenswrapper[4941]: I1130 09:33:03.480474 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-8ktj4_a342b77c-f58f-499e-9671-a67cd80d9e3e/nova-cell1-openstack-openstack-cell1/0.log" Nov 30 09:33:03 crc kubenswrapper[4941]: I1130 09:33:03.714665 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_7fe20d7b-d57d-4291-b62a-a7d0e417ebdd/nova-metadata-log/0.log" Nov 30 09:33:03 crc kubenswrapper[4941]: I1130 09:33:03.733881 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_7fe20d7b-d57d-4291-b62a-a7d0e417ebdd/nova-metadata-metadata/0.log" Nov 30 09:33:03 crc kubenswrapper[4941]: I1130 09:33:03.894622 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_d369d884-79c4-4bd4-b952-0076f23d8e66/nova-scheduler-scheduler/0.log" Nov 30 09:33:04 crc kubenswrapper[4941]: I1130 09:33:04.111729 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_69a33c95-4877-49f2-9bcc-59d5c750a626/mysql-bootstrap/0.log" Nov 30 09:33:04 crc kubenswrapper[4941]: I1130 09:33:04.256361 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_69a33c95-4877-49f2-9bcc-59d5c750a626/mysql-bootstrap/0.log" Nov 30 09:33:04 crc kubenswrapper[4941]: 
I1130 09:33:04.290046 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_69a33c95-4877-49f2-9bcc-59d5c750a626/galera/0.log" Nov 30 09:33:04 crc kubenswrapper[4941]: I1130 09:33:04.458117 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e7d6456a-db62-4df3-856e-5896c4798d2f/mysql-bootstrap/0.log" Nov 30 09:33:04 crc kubenswrapper[4941]: I1130 09:33:04.685893 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_84f0bf8c-bf1e-4179-b156-652c9591c146/openstackclient/0.log" Nov 30 09:33:04 crc kubenswrapper[4941]: I1130 09:33:04.691681 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e7d6456a-db62-4df3-856e-5896c4798d2f/mysql-bootstrap/0.log" Nov 30 09:33:04 crc kubenswrapper[4941]: I1130 09:33:04.817767 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_e7d6456a-db62-4df3-856e-5896c4798d2f/galera/0.log" Nov 30 09:33:04 crc kubenswrapper[4941]: I1130 09:33:04.933952 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6817f3c1-bc9d-462e-a494-5ac7a10661a5/openstack-network-exporter/0.log" Nov 30 09:33:05 crc kubenswrapper[4941]: I1130 09:33:05.083033 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_6817f3c1-bc9d-462e-a494-5ac7a10661a5/ovn-northd/0.log" Nov 30 09:33:05 crc kubenswrapper[4941]: I1130 09:33:05.319747 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d/openstack-network-exporter/0.log" Nov 30 09:33:05 crc kubenswrapper[4941]: I1130 09:33:05.322124 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-hj9k4_3bf253a3-5fac-4f21-bfdf-78179dd8c647/ovn-openstack-openstack-cell1/0.log" Nov 30 09:33:05 crc kubenswrapper[4941]: I1130 09:33:05.461432 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_3304fe93-73ea-4ebc-ab5d-6b4e761c0c2d/ovsdbserver-nb/0.log" Nov 30 09:33:05 crc kubenswrapper[4941]: I1130 09:33:05.595490 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_e6ac493b-d7d5-4a71-a87c-e0bd8de8afba/openstack-network-exporter/0.log" Nov 30 09:33:05 crc kubenswrapper[4941]: I1130 09:33:05.680036 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_e6ac493b-d7d5-4a71-a87c-e0bd8de8afba/ovsdbserver-nb/0.log" Nov 30 09:33:06 crc kubenswrapper[4941]: I1130 09:33:06.135585 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_5e18dd2f-0117-4e65-a420-190874e21598/openstack-network-exporter/0.log" Nov 30 09:33:06 crc kubenswrapper[4941]: I1130 09:33:06.188973 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_5e18dd2f-0117-4e65-a420-190874e21598/ovsdbserver-nb/0.log" Nov 30 09:33:06 crc kubenswrapper[4941]: I1130 09:33:06.392915 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_ba0f12dd-04a2-47b7-94b3-14d62c68cbbe/ovsdbserver-sb/0.log" Nov 30 09:33:06 crc kubenswrapper[4941]: I1130 09:33:06.399673 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_ba0f12dd-04a2-47b7-94b3-14d62c68cbbe/openstack-network-exporter/0.log" Nov 30 09:33:06 crc kubenswrapper[4941]: I1130 09:33:06.635049 4941 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ovsdbserver-sb-1_c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6/openstack-network-exporter/0.log" Nov 30 09:33:06 crc kubenswrapper[4941]: I1130 09:33:06.743148 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_c6c1ed9f-9a62-4026-ab0c-6bdd3f6959e6/ovsdbserver-sb/0.log" Nov 30 09:33:06 crc kubenswrapper[4941]: I1130 09:33:06.800444 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_7fedd7ce-5656-4591-909c-b0ff87e1b969/openstack-network-exporter/0.log" Nov 30 09:33:06 crc kubenswrapper[4941]: I1130 09:33:06.895387 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_7fedd7ce-5656-4591-909c-b0ff87e1b969/ovsdbserver-sb/0.log" Nov 30 09:33:07 crc kubenswrapper[4941]: I1130 09:33:07.217380 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-95bccfd8-jcbmg_64e9210e-a153-46f5-91d1-227beb120823/placement-api/0.log" Nov 30 09:33:07 crc kubenswrapper[4941]: I1130 09:33:07.297515 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-95bccfd8-jcbmg_64e9210e-a153-46f5-91d1-227beb120823/placement-log/0.log" Nov 30 09:33:07 crc kubenswrapper[4941]: I1130 09:33:07.529767 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_97438922-72d6-4d56-bfa0-11e88de4d27f/init-config-reloader/0.log" Nov 30 09:33:07 crc kubenswrapper[4941]: I1130 09:33:07.593694 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-cdkzwf_14bba754-3a7e-4d3e-a017-02842a5dc338/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log" Nov 30 09:33:07 crc kubenswrapper[4941]: I1130 09:33:07.856161 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_97438922-72d6-4d56-bfa0-11e88de4d27f/config-reloader/0.log" Nov 30 09:33:07 crc kubenswrapper[4941]: I1130 09:33:07.871682 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_97438922-72d6-4d56-bfa0-11e88de4d27f/init-config-reloader/0.log" Nov 30 09:33:07 crc kubenswrapper[4941]: I1130 09:33:07.943575 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_97438922-72d6-4d56-bfa0-11e88de4d27f/prometheus/0.log" Nov 30 09:33:07 crc kubenswrapper[4941]: I1130 09:33:07.979613 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_97438922-72d6-4d56-bfa0-11e88de4d27f/thanos-sidecar/0.log" Nov 30 09:33:08 crc kubenswrapper[4941]: I1130 09:33:08.190100 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_293fc425-5b05-42e2-81c5-22e843125a15/setup-container/0.log" Nov 30 09:33:08 crc kubenswrapper[4941]: I1130 09:33:08.590957 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7047253b-c52c-40a1-a44c-f20c5e5fcdac/setup-container/0.log" Nov 30 09:33:08 crc kubenswrapper[4941]: I1130 09:33:08.619771 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_293fc425-5b05-42e2-81c5-22e843125a15/rabbitmq/0.log" Nov 30 09:33:08 crc kubenswrapper[4941]: I1130 09:33:08.745472 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_293fc425-5b05-42e2-81c5-22e843125a15/setup-container/0.log" Nov 30 09:33:08 crc 
kubenswrapper[4941]: I1130 09:33:08.859813 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7047253b-c52c-40a1-a44c-f20c5e5fcdac/setup-container/0.log" Nov 30 09:33:09 crc kubenswrapper[4941]: I1130 09:33:09.124887 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-gncsn_43bbf907-a7fd-45c7-ac8d-45259069d8ca/reboot-os-openstack-openstack-cell1/0.log" Nov 30 09:33:09 crc kubenswrapper[4941]: I1130 09:33:09.360749 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-4s4c4_e7c5b506-3ccd-4959-a8e5-dd1ec4dece1e/run-os-openstack-openstack-cell1/0.log" Nov 30 09:33:09 crc kubenswrapper[4941]: I1130 09:33:09.498114 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-openstack-58kvt_cda00e1a-68e9-45f3-85f8-840fc4614400/ssh-known-hosts-openstack/0.log" Nov 30 09:33:09 crc kubenswrapper[4941]: I1130 09:33:09.755596 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-57swc_ba4069f3-4792-4a09-a693-9c1f38b2514c/telemetry-openstack-openstack-cell1/0.log" Nov 30 09:33:10 crc kubenswrapper[4941]: I1130 09:33:10.027216 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_8501edc2-e733-4e75-9afd-ecefc4f74de2/tempest-tests-tempest-tests-runner/0.log" Nov 30 09:33:10 crc kubenswrapper[4941]: I1130 09:33:10.076518 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_4341309d-299a-4004-b88a-add57d4ea72f/test-operator-logs-container/0.log" Nov 30 09:33:10 crc kubenswrapper[4941]: I1130 09:33:10.455158 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-wxfmc_941a2803-c15d-4bd4-8348-e4cb3fd11d55/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log" Nov 30 09:33:10 crc kubenswrapper[4941]: I1130 09:33:10.647010 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-b9kqh_06030610-8d39-469d-8cc8-ec8dfe976e23/validate-network-openstack-openstack-cell1/0.log" Nov 30 09:33:10 crc kubenswrapper[4941]: I1130 09:33:10.651060 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7047253b-c52c-40a1-a44c-f20c5e5fcdac/rabbitmq/0.log" Nov 30 09:33:31 crc kubenswrapper[4941]: I1130 09:33:31.584918 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_f2c219f6-011a-47c3-81d2-99d151642f9a/memcached/0.log" Nov 30 09:33:39 crc kubenswrapper[4941]: I1130 09:33:39.793993 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-cell1-galera-0" podUID="69a33c95-4877-49f2-9bcc-59d5c750a626" containerName="galera" probeResult="failure" output="command timed out" Nov 30 09:33:39 crc kubenswrapper[4941]: I1130 09:33:39.795885 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="69a33c95-4877-49f2-9bcc-59d5c750a626" containerName="galera" probeResult="failure" output="command timed out" Nov 30 09:33:42 crc kubenswrapper[4941]: I1130 09:33:42.078480 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k_5edf826b-ce6f-4810-b400-743a0f02d05e/util/0.log" Nov 30 09:33:42 crc kubenswrapper[4941]: I1130 09:33:42.803032 
4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k_5edf826b-ce6f-4810-b400-743a0f02d05e/util/0.log" Nov 30 09:33:42 crc kubenswrapper[4941]: I1130 09:33:42.803531 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k_5edf826b-ce6f-4810-b400-743a0f02d05e/pull/0.log" Nov 30 09:33:42 crc kubenswrapper[4941]: I1130 09:33:42.841120 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k_5edf826b-ce6f-4810-b400-743a0f02d05e/pull/0.log" Nov 30 09:33:42 crc kubenswrapper[4941]: I1130 09:33:42.980848 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k_5edf826b-ce6f-4810-b400-743a0f02d05e/util/0.log" Nov 30 09:33:43 crc kubenswrapper[4941]: I1130 09:33:43.144809 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k_5edf826b-ce6f-4810-b400-743a0f02d05e/extract/0.log" Nov 30 09:33:43 crc kubenswrapper[4941]: I1130 09:33:43.150152 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bbm48k_5edf826b-ce6f-4810-b400-743a0f02d05e/pull/0.log" Nov 30 09:33:43 crc kubenswrapper[4941]: I1130 09:33:43.241968 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-7gqrn_689afda6-cba6-4975-be84-09c20304ae05/kube-rbac-proxy/0.log" Nov 30 09:33:43 crc kubenswrapper[4941]: I1130 09:33:43.468517 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-59m9r_e324349c-14cd-4043-9278-783e8faa883e/kube-rbac-proxy/0.log" Nov 30 09:33:43 crc kubenswrapper[4941]: I1130 09:33:43.509369 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-7gqrn_689afda6-cba6-4975-be84-09c20304ae05/manager/0.log" Nov 30 09:33:43 crc kubenswrapper[4941]: I1130 09:33:43.620799 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-59m9r_e324349c-14cd-4043-9278-783e8faa883e/manager/0.log" Nov 30 09:33:43 crc kubenswrapper[4941]: I1130 09:33:43.765884 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-vk24n_9389b152-75d5-41ce-8638-f531ad93710d/kube-rbac-proxy/0.log" Nov 30 09:33:43 crc kubenswrapper[4941]: I1130 09:33:43.822706 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-vk24n_9389b152-75d5-41ce-8638-f531ad93710d/manager/0.log" Nov 30 09:33:43 crc kubenswrapper[4941]: I1130 09:33:43.986455 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-kwm52_d1af403d-ad9d-464b-b9f8-60a57868b8fb/kube-rbac-proxy/0.log" Nov 30 09:33:44 crc kubenswrapper[4941]: I1130 09:33:44.155033 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-kwm52_d1af403d-ad9d-464b-b9f8-60a57868b8fb/manager/0.log" Nov 30 
09:33:44 crc kubenswrapper[4941]: I1130 09:33:44.206543 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-2vldw_a5e74e68-5dfa-41e0-9301-09f3e59450fe/kube-rbac-proxy/0.log"
Nov 30 09:33:44 crc kubenswrapper[4941]: I1130 09:33:44.323539 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-2vldw_a5e74e68-5dfa-41e0-9301-09f3e59450fe/manager/0.log"
Nov 30 09:33:44 crc kubenswrapper[4941]: I1130 09:33:44.427802 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-crskg_5ac344b8-6b4f-45a7-afbd-666188065ed6/kube-rbac-proxy/0.log"
Nov 30 09:33:44 crc kubenswrapper[4941]: I1130 09:33:44.527167 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-crskg_5ac344b8-6b4f-45a7-afbd-666188065ed6/manager/0.log"
Nov 30 09:33:44 crc kubenswrapper[4941]: I1130 09:33:44.750208 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-5vh4l_3e37a585-c770-4472-bf53-4be22b98550a/kube-rbac-proxy/0.log"
Nov 30 09:33:44 crc kubenswrapper[4941]: I1130 09:33:44.880215 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-fg9bt_af1b065d-e876-4fd1-b63e-6c5015b7c169/kube-rbac-proxy/0.log"
Nov 30 09:33:45 crc kubenswrapper[4941]: I1130 09:33:45.029466 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-5vh4l_3e37a585-c770-4472-bf53-4be22b98550a/manager/0.log"
Nov 30 09:33:45 crc kubenswrapper[4941]: I1130 09:33:45.071926 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-fg9bt_af1b065d-e876-4fd1-b63e-6c5015b7c169/manager/0.log"
Nov 30 09:33:45 crc kubenswrapper[4941]: I1130 09:33:45.331847 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-g2xq2_15d00f84-d2c3-445d-b411-3f0bca56234e/kube-rbac-proxy/0.log"
Nov 30 09:33:45 crc kubenswrapper[4941]: I1130 09:33:45.423066 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-g2xq2_15d00f84-d2c3-445d-b411-3f0bca56234e/manager/0.log"
Nov 30 09:33:45 crc kubenswrapper[4941]: I1130 09:33:45.460142 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-zscsf_cc51d757-c6d0-4fb1-9b26-4cb90ceacc60/kube-rbac-proxy/0.log"
Nov 30 09:33:45 crc kubenswrapper[4941]: I1130 09:33:45.646064 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-kzvdx_7f3cfd2e-bec9-46f6-9161-92a9b33d38ac/kube-rbac-proxy/0.log"
Nov 30 09:33:45 crc kubenswrapper[4941]: I1130 09:33:45.682015 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-zscsf_cc51d757-c6d0-4fb1-9b26-4cb90ceacc60/manager/0.log"
Nov 30 09:33:45 crc kubenswrapper[4941]: I1130 09:33:45.802492 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-kzvdx_7f3cfd2e-bec9-46f6-9161-92a9b33d38ac/manager/0.log"
Nov 30 09:33:45 crc kubenswrapper[4941]: I1130 09:33:45.951099 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-2ccs4_83803d31-cd83-4860-8129-b3b1d717aadd/kube-rbac-proxy/0.log"
Nov 30 09:33:46 crc kubenswrapper[4941]: I1130 09:33:46.075418 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-2ccs4_83803d31-cd83-4860-8129-b3b1d717aadd/manager/0.log"
Nov 30 09:33:46 crc kubenswrapper[4941]: I1130 09:33:46.147509 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-wt4hg_4f23afd3-e930-4f37-b76f-cb5a6e158796/kube-rbac-proxy/0.log"
Nov 30 09:33:46 crc kubenswrapper[4941]: I1130 09:33:46.438452 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-wt4hg_4f23afd3-e930-4f37-b76f-cb5a6e158796/manager/0.log"
Nov 30 09:33:46 crc kubenswrapper[4941]: I1130 09:33:46.445476 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-gl2n2_2c71dfd3-27b4-4ec1-9983-2a8351bf8d59/manager/0.log"
Nov 30 09:33:46 crc kubenswrapper[4941]: I1130 09:33:46.460023 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-gl2n2_2c71dfd3-27b4-4ec1-9983-2a8351bf8d59/kube-rbac-proxy/0.log"
Nov 30 09:33:46 crc kubenswrapper[4941]: I1130 09:33:46.653586 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6698bcb446nm7h7_8be70097-8f2d-4a40-8d1f-57eadb38d1f2/kube-rbac-proxy/0.log"
Nov 30 09:33:46 crc kubenswrapper[4941]: I1130 09:33:46.704704 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6698bcb446nm7h7_8be70097-8f2d-4a40-8d1f-57eadb38d1f2/manager/0.log"
Nov 30 09:33:47 crc kubenswrapper[4941]: I1130 09:33:47.349671 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6ddddd9d6f-zbv6n_a59f8b8f-0b1c-4ba5-8796-7c87e1ce6940/operator/0.log"
Nov 30 09:33:47 crc kubenswrapper[4941]: I1130 09:33:47.489375 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-dfm6s_153a8f3b-e179-4d4c-9d5b-14bc3efdc8ea/registry-server/0.log"
Nov 30 09:33:47 crc kubenswrapper[4941]: I1130 09:33:47.658141 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-l6kp5_17c5c370-aa15-4666-92dc-3ba34847a487/kube-rbac-proxy/0.log"
Nov 30 09:33:47 crc kubenswrapper[4941]: I1130 09:33:47.876712 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-49bzg_6af3ebbf-c01d-45a6-b88d-f1fa2aa8044e/kube-rbac-proxy/0.log"
Nov 30 09:33:48 crc kubenswrapper[4941]: I1130 09:33:48.008297 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-49bzg_6af3ebbf-c01d-45a6-b88d-f1fa2aa8044e/manager/0.log"
Nov 30 09:33:48 crc kubenswrapper[4941]: I1130 09:33:48.202625 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-l6kp5_17c5c370-aa15-4666-92dc-3ba34847a487/manager/0.log"
Nov 30 09:33:48 crc kubenswrapper[4941]: I1130 09:33:48.366088 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-7mmgl_f27d34c3-fcc0-4180-8bc2-8d95bef6d2ec/operator/0.log"
Nov 30 09:33:48 crc kubenswrapper[4941]: I1130 09:33:48.516815 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-85qhp_dedc405b-7c3e-4df1-afe4-63658d5a92ef/kube-rbac-proxy/0.log"
Nov 30 09:33:48 crc kubenswrapper[4941]: I1130 09:33:48.665773 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-85qhp_dedc405b-7c3e-4df1-afe4-63658d5a92ef/manager/0.log"
Nov 30 09:33:48 crc kubenswrapper[4941]: I1130 09:33:48.702821 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-kx5z9_4cd61cf4-a463-445a-83f1-2598f698d53a/kube-rbac-proxy/0.log"
Nov 30 09:33:49 crc kubenswrapper[4941]: I1130 09:33:49.022981 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d628w_1111c4ca-9dea-44f1-b391-e534c8c31476/kube-rbac-proxy/0.log"
Nov 30 09:33:49 crc kubenswrapper[4941]: I1130 09:33:49.158068 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-kx5z9_4cd61cf4-a463-445a-83f1-2598f698d53a/manager/0.log"
Nov 30 09:33:49 crc kubenswrapper[4941]: I1130 09:33:49.162247 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-656fd97d56-nwtqx_43133738-5033-4356-a2d6-7f0a9b78c7f8/manager/0.log"
Nov 30 09:33:49 crc kubenswrapper[4941]: I1130 09:33:49.209292 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d628w_1111c4ca-9dea-44f1-b391-e534c8c31476/manager/0.log"
Nov 30 09:33:49 crc kubenswrapper[4941]: I1130 09:33:49.280886 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-898p4_3c1ab696-9519-4a69-82c6-4a7078a7472a/kube-rbac-proxy/0.log"
Nov 30 09:33:49 crc kubenswrapper[4941]: I1130 09:33:49.456874 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-898p4_3c1ab696-9519-4a69-82c6-4a7078a7472a/manager/0.log"
Nov 30 09:34:02 crc kubenswrapper[4941]: I1130 09:34:02.978601 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 09:34:02 crc kubenswrapper[4941]: I1130 09:34:02.979600 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 09:34:17 crc kubenswrapper[4941]: I1130 09:34:17.444905 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-ppcvh_63606771-b004-4903-a2a1-d5032a0fa94b/control-plane-machine-set-operator/0.log"
Nov 30 09:34:17 crc kubenswrapper[4941]: I1130 09:34:17.522742 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-mt5dk_465a2899-647c-4144-8810-46a4a4e49909/kube-rbac-proxy/0.log"
Nov 30 09:34:17 crc kubenswrapper[4941]: I1130 09:34:17.712270 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-mt5dk_465a2899-647c-4144-8810-46a4a4e49909/machine-api-operator/0.log"
Nov 30 09:34:32 crc kubenswrapper[4941]: I1130 09:34:32.978479 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 09:34:32 crc kubenswrapper[4941]: I1130 09:34:32.979813 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 09:34:35 crc kubenswrapper[4941]: I1130 09:34:35.286253 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-9h7bf_a624fc70-8bbc-4706-b2d8-221ec51c3f2c/cert-manager-controller/0.log"
Nov 30 09:34:35 crc kubenswrapper[4941]: I1130 09:34:35.438983 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-2xsxn_c4fdd60d-955a-47b0-b721-f8d3517f866f/cert-manager-cainjector/0.log"
Nov 30 09:34:35 crc kubenswrapper[4941]: I1130 09:34:35.509290 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-lbhgf_78aaf7c2-3ae5-411f-b08a-a8b57f11ea2a/cert-manager-webhook/0.log"
Nov 30 09:34:52 crc kubenswrapper[4941]: I1130 09:34:52.166789 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-f7frt_5b760607-de49-4963-b72b-5bc4ff0f41b7/nmstate-console-plugin/0.log"
Nov 30 09:34:52 crc kubenswrapper[4941]: I1130 09:34:52.393968 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-5t7sr_be9045de-8bc2-4342-bb17-345c07c16d74/nmstate-handler/0.log"
Nov 30 09:34:52 crc kubenswrapper[4941]: I1130 09:34:52.444800 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-vnjxd_d958a2dd-c486-4357-8ac9-a808e3474e65/kube-rbac-proxy/0.log"
Nov 30 09:34:52 crc kubenswrapper[4941]: I1130 09:34:52.509434 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-vnjxd_d958a2dd-c486-4357-8ac9-a808e3474e65/nmstate-metrics/0.log"
Nov 30 09:34:52 crc kubenswrapper[4941]: I1130 09:34:52.704228 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-vmmqt_abcd3017-8b84-429b-8f2b-aa2137964cb6/nmstate-operator/0.log"
Nov 30 09:34:52 crc kubenswrapper[4941]: I1130 09:34:52.759787 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-w527h_1d0ef5c7-2b43-4e45-979d-de23cce56371/nmstate-webhook/0.log"
Nov 30 09:35:02 crc kubenswrapper[4941]: I1130 09:35:02.978891 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 09:35:02 crc kubenswrapper[4941]: I1130 09:35:02.979856 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 09:35:02 crc kubenswrapper[4941]: I1130 09:35:02.979957 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 09:35:02 crc kubenswrapper[4941]: I1130 09:35:02.981276 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"753d67d459852b50e5ca02ffb3cea393cf13d47c8faaf0defaa8dd4fdd572223"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 09:35:02 crc kubenswrapper[4941]: I1130 09:35:02.981394 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://753d67d459852b50e5ca02ffb3cea393cf13d47c8faaf0defaa8dd4fdd572223" gracePeriod=600
Nov 30 09:35:03 crc kubenswrapper[4941]: I1130 09:35:03.847101 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="753d67d459852b50e5ca02ffb3cea393cf13d47c8faaf0defaa8dd4fdd572223" exitCode=0
Nov 30 09:35:03 crc kubenswrapper[4941]: I1130 09:35:03.847192 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"753d67d459852b50e5ca02ffb3cea393cf13d47c8faaf0defaa8dd4fdd572223"}
Nov 30 09:35:03 crc kubenswrapper[4941]: I1130 09:35:03.848098 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerStarted","Data":"00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc"}
Nov 30 09:35:03 crc kubenswrapper[4941]: I1130 09:35:03.848135 4941 scope.go:117] "RemoveContainer" containerID="3206bfa618236bbbbd99648c6ff569e85ef5cf84d2ce4273525f54625449b5a5"
Nov 30 09:35:11 crc kubenswrapper[4941]: I1130 09:35:11.221588 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-8gdxg_573f98db-7738-42ea-9668-ce64acfb316b/kube-rbac-proxy/0.log"
Nov 30 09:35:11 crc kubenswrapper[4941]: I1130 09:35:11.558708 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-8gdxg_573f98db-7738-42ea-9668-ce64acfb316b/controller/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.048045 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-wbtcl_7bb61fc6-70c2-43ee-ae35-e597b7033250/frr-k8s-webhook-server/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.051109 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/cp-frr-files/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.287564 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/cp-frr-files/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.344081 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/cp-reloader/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.363316 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/cp-reloader/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.371405 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/cp-metrics/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.639877 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/cp-metrics/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.699156 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/cp-metrics/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.700088 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/cp-reloader/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.712451 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/cp-frr-files/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.887098 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/cp-frr-files/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.930304 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/cp-metrics/0.log"
Nov 30 09:35:12 crc kubenswrapper[4941]: I1130 09:35:12.953574 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/cp-reloader/0.log"
Nov 30 09:35:13 crc kubenswrapper[4941]: I1130 09:35:13.016130 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/controller/0.log"
Nov 30 09:35:13 crc kubenswrapper[4941]: I1130 09:35:13.170718 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/kube-rbac-proxy/0.log"
Nov 30 09:35:13 crc kubenswrapper[4941]: I1130 09:35:13.203221 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/frr-metrics/0.log"
Nov 30 09:35:13 crc kubenswrapper[4941]: I1130 09:35:13.256258 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/kube-rbac-proxy-frr/0.log"
Nov 30 09:35:13 crc kubenswrapper[4941]: I1130 09:35:13.510547 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/reloader/0.log"
Nov 30 09:35:13 crc kubenswrapper[4941]: I1130 09:35:13.566857 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-759d497878-7pn4c_21e69639-56df-4de5-8339-c864a9864237/manager/0.log"
Nov 30 09:35:13 crc kubenswrapper[4941]: I1130 09:35:13.824064 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-55cd88cb6c-wlwwj_35bb24de-c515-4934-8de3-dce90b7d06ca/webhook-server/0.log"
Nov 30 09:35:14 crc kubenswrapper[4941]: I1130 09:35:14.124043 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rqtrj_fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b/kube-rbac-proxy/0.log"
Nov 30 09:35:15 crc kubenswrapper[4941]: I1130 09:35:15.024919 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rqtrj_fdc7052b-fe5b-416d-b1d4-6f0ccfd8193b/speaker/0.log"
Nov 30 09:35:16 crc kubenswrapper[4941]: I1130 09:35:16.812957 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zfmgz_b6c0593d-b6b1-422c-84ec-a5cf8726ee39/frr/0.log"
Nov 30 09:35:31 crc kubenswrapper[4941]: I1130 09:35:31.263771 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf_75189b59-7338-40b9-a1be-5a7e35cabdf6/util/0.log"
Nov 30 09:35:31 crc kubenswrapper[4941]: I1130 09:35:31.538270 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf_75189b59-7338-40b9-a1be-5a7e35cabdf6/util/0.log"
Nov 30 09:35:31 crc kubenswrapper[4941]: I1130 09:35:31.573902 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf_75189b59-7338-40b9-a1be-5a7e35cabdf6/pull/0.log"
Nov 30 09:35:31 crc kubenswrapper[4941]: I1130 09:35:31.594178 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf_75189b59-7338-40b9-a1be-5a7e35cabdf6/pull/0.log"
Nov 30 09:35:31 crc kubenswrapper[4941]: I1130 09:35:31.806182 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf_75189b59-7338-40b9-a1be-5a7e35cabdf6/pull/0.log"
Nov 30 09:35:31 crc kubenswrapper[4941]: I1130 09:35:31.876353 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf_75189b59-7338-40b9-a1be-5a7e35cabdf6/util/0.log"
Nov 30 09:35:31 crc kubenswrapper[4941]: I1130 09:35:31.878589 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a9vsmf_75189b59-7338-40b9-a1be-5a7e35cabdf6/extract/0.log"
Nov 30 09:35:32 crc kubenswrapper[4941]: I1130 09:35:32.045161 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn_990f638a-8f13-4eaa-8eeb-6f7d1f3f032e/util/0.log"
Nov 30 09:35:32 crc kubenswrapper[4941]: I1130 09:35:32.297244 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn_990f638a-8f13-4eaa-8eeb-6f7d1f3f032e/pull/0.log"
Nov 30 09:35:32 crc kubenswrapper[4941]: I1130 09:35:32.297584 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn_990f638a-8f13-4eaa-8eeb-6f7d1f3f032e/pull/0.log"
Nov 30 09:35:32 crc kubenswrapper[4941]: I1130 09:35:32.348169 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn_990f638a-8f13-4eaa-8eeb-6f7d1f3f032e/util/0.log"
Nov 30 09:35:32 crc kubenswrapper[4941]: I1130 09:35:32.527920 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn_990f638a-8f13-4eaa-8eeb-6f7d1f3f032e/util/0.log"
Nov 30 09:35:32 crc kubenswrapper[4941]: I1130 09:35:32.559516 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn_990f638a-8f13-4eaa-8eeb-6f7d1f3f032e/extract/0.log"
Nov 30 09:35:32 crc kubenswrapper[4941]: I1130 09:35:32.570821 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ff9rmn_990f638a-8f13-4eaa-8eeb-6f7d1f3f032e/pull/0.log"
Nov 30 09:35:32 crc kubenswrapper[4941]: I1130 09:35:32.766871 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj_84659532-4d63-4199-a05a-7636f9a2f4d4/util/0.log"
Nov 30 09:35:32 crc kubenswrapper[4941]: I1130 09:35:32.936811 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj_84659532-4d63-4199-a05a-7636f9a2f4d4/pull/0.log"
Nov 30 09:35:32 crc kubenswrapper[4941]: I1130 09:35:32.954520 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj_84659532-4d63-4199-a05a-7636f9a2f4d4/util/0.log"
Nov 30 09:35:32 crc kubenswrapper[4941]: I1130 09:35:32.956069 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj_84659532-4d63-4199-a05a-7636f9a2f4d4/pull/0.log"
Nov 30 09:35:33 crc kubenswrapper[4941]: I1130 09:35:33.162071 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj_84659532-4d63-4199-a05a-7636f9a2f4d4/util/0.log"
Nov 30 09:35:33 crc kubenswrapper[4941]: I1130 09:35:33.162936 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj_84659532-4d63-4199-a05a-7636f9a2f4d4/extract/0.log"
Nov 30 09:35:33 crc kubenswrapper[4941]: I1130 09:35:33.188242 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q4vgj_84659532-4d63-4199-a05a-7636f9a2f4d4/pull/0.log"
Nov 30 09:35:33 crc kubenswrapper[4941]: I1130 09:35:33.399933 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x_9044019d-a819-45e8-85ff-263b655f3af6/util/0.log"
Nov 30 09:35:33 crc kubenswrapper[4941]: I1130 09:35:33.609065 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x_9044019d-a819-45e8-85ff-263b655f3af6/pull/0.log"
Nov 30 09:35:33 crc kubenswrapper[4941]: I1130 09:35:33.633916 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x_9044019d-a819-45e8-85ff-263b655f3af6/pull/0.log"
Nov 30 09:35:33 crc kubenswrapper[4941]: I1130 09:35:33.653146 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x_9044019d-a819-45e8-85ff-263b655f3af6/util/0.log"
Nov 30 09:35:33 crc kubenswrapper[4941]: I1130 09:35:33.916240 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x_9044019d-a819-45e8-85ff-263b655f3af6/util/0.log"
Nov 30 09:35:33 crc kubenswrapper[4941]: I1130 09:35:33.940908 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x_9044019d-a819-45e8-85ff-263b655f3af6/extract/0.log"
Nov 30 09:35:33 crc kubenswrapper[4941]: I1130 09:35:33.954691 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83qfm5x_9044019d-a819-45e8-85ff-263b655f3af6/pull/0.log"
Nov 30 09:35:34 crc kubenswrapper[4941]: I1130 09:35:34.154443 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tg6kf_207a16c0-01d3-49a0-8892-daf7e01defc8/extract-utilities/0.log"
Nov 30 09:35:34 crc kubenswrapper[4941]: I1130 09:35:34.644144 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tg6kf_207a16c0-01d3-49a0-8892-daf7e01defc8/extract-utilities/0.log"
Nov 30 09:35:34 crc kubenswrapper[4941]: I1130 09:35:34.644464 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tg6kf_207a16c0-01d3-49a0-8892-daf7e01defc8/extract-content/0.log"
Nov 30 09:35:34 crc kubenswrapper[4941]: I1130 09:35:34.675569 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tg6kf_207a16c0-01d3-49a0-8892-daf7e01defc8/extract-content/0.log"
Nov 30 09:35:34 crc kubenswrapper[4941]: I1130 09:35:34.920736 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tg6kf_207a16c0-01d3-49a0-8892-daf7e01defc8/extract-utilities/0.log"
Nov 30 09:35:34 crc kubenswrapper[4941]: I1130 09:35:34.973742 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tg6kf_207a16c0-01d3-49a0-8892-daf7e01defc8/extract-content/0.log"
Nov 30 09:35:35 crc kubenswrapper[4941]: I1130 09:35:35.210148 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jwd8q_670f3a59-54e6-4bd2-a8fc-a74be681bae4/extract-utilities/0.log"
Nov 30 09:35:35 crc kubenswrapper[4941]: I1130 09:35:35.378950 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-tg6kf_207a16c0-01d3-49a0-8892-daf7e01defc8/registry-server/0.log"
Nov 30 09:35:35 crc kubenswrapper[4941]: I1130 09:35:35.449374 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jwd8q_670f3a59-54e6-4bd2-a8fc-a74be681bae4/extract-utilities/0.log"
Nov 30 09:35:35 crc kubenswrapper[4941]: I1130 09:35:35.505703 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jwd8q_670f3a59-54e6-4bd2-a8fc-a74be681bae4/extract-content/0.log"
Nov 30 09:35:35 crc kubenswrapper[4941]: I1130 09:35:35.508949 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jwd8q_670f3a59-54e6-4bd2-a8fc-a74be681bae4/extract-content/0.log"
Nov 30 09:35:35 crc kubenswrapper[4941]: I1130 09:35:35.715404 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jwd8q_670f3a59-54e6-4bd2-a8fc-a74be681bae4/extract-content/0.log"
Nov 30 09:35:35 crc kubenswrapper[4941]: I1130 09:35:35.732355 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jwd8q_670f3a59-54e6-4bd2-a8fc-a74be681bae4/extract-utilities/0.log"
Nov 30 09:35:35 crc kubenswrapper[4941]: I1130 09:35:35.797069 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-hzvnb_09c52383-5a1a-4a4a-a354-46da2eee2a39/marketplace-operator/0.log"
Nov 30 09:35:36 crc kubenswrapper[4941]: I1130 09:35:36.104834 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-jwd8q_670f3a59-54e6-4bd2-a8fc-a74be681bae4/registry-server/0.log"
Nov 30 09:35:36 crc kubenswrapper[4941]: I1130 09:35:36.405017 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-67mqs_5f241870-dc20-4e9d-b9ca-6504fc0b3eb1/extract-utilities/0.log"
Nov 30 09:35:36 crc kubenswrapper[4941]: I1130 09:35:36.593059 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-67mqs_5f241870-dc20-4e9d-b9ca-6504fc0b3eb1/extract-content/0.log"
Nov 30 09:35:36 crc kubenswrapper[4941]: I1130 09:35:36.631192 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-67mqs_5f241870-dc20-4e9d-b9ca-6504fc0b3eb1/extract-utilities/0.log"
Nov 30 09:35:36 crc kubenswrapper[4941]: I1130 09:35:36.639529 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-67mqs_5f241870-dc20-4e9d-b9ca-6504fc0b3eb1/extract-content/0.log"
Nov 30 09:35:36 crc kubenswrapper[4941]: I1130 09:35:36.841056 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-67mqs_5f241870-dc20-4e9d-b9ca-6504fc0b3eb1/extract-content/0.log"
Nov 30 09:35:36 crc kubenswrapper[4941]: I1130 09:35:36.883077 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-67mqs_5f241870-dc20-4e9d-b9ca-6504fc0b3eb1/extract-utilities/0.log"
Nov 30 09:35:36 crc kubenswrapper[4941]: I1130 09:35:36.900923 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s5mh6_4faa4588-9dd2-4bf3-a983-acbe2b5996db/extract-utilities/0.log"
Nov 30 09:35:37 crc kubenswrapper[4941]: I1130 09:35:37.107052 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s5mh6_4faa4588-9dd2-4bf3-a983-acbe2b5996db/extract-utilities/0.log"
Nov 30 09:35:37 crc kubenswrapper[4941]: I1130 09:35:37.144005 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s5mh6_4faa4588-9dd2-4bf3-a983-acbe2b5996db/extract-content/0.log"
Nov 30 09:35:37 crc kubenswrapper[4941]: I1130 09:35:37.155253 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s5mh6_4faa4588-9dd2-4bf3-a983-acbe2b5996db/extract-content/0.log"
Nov 30 09:35:37 crc kubenswrapper[4941]: I1130 09:35:37.237097 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-67mqs_5f241870-dc20-4e9d-b9ca-6504fc0b3eb1/registry-server/0.log"
Nov 30 09:35:37 crc kubenswrapper[4941]: I1130 09:35:37.378699 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s5mh6_4faa4588-9dd2-4bf3-a983-acbe2b5996db/extract-utilities/0.log"
Nov 30 09:35:37 crc kubenswrapper[4941]: I1130 09:35:37.401917 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s5mh6_4faa4588-9dd2-4bf3-a983-acbe2b5996db/extract-content/0.log"
Nov 30 09:35:38 crc kubenswrapper[4941]: I1130 09:35:38.937984 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s5mh6_4faa4588-9dd2-4bf3-a983-acbe2b5996db/registry-server/0.log"
Nov 30 09:35:53 crc kubenswrapper[4941]: I1130 09:35:53.609811 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-9gmqg_3514c902-e0b0-4f3e-a885-5f2c84daf49c/prometheus-operator/0.log"
Nov 30 09:35:54 crc kubenswrapper[4941]: I1130 09:35:54.243127 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7b798cfbf-9hdkt_240c2020-021e-4c67-ba69-51ed7c6fb5a4/prometheus-operator-admission-webhook/0.log"
Nov 30 09:35:54 crc kubenswrapper[4941]: I1130 09:35:54.256949 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7b798cfbf-gn6hd_056ca204-09f9-4e74-b66f-f7b59c87d535/prometheus-operator-admission-webhook/0.log"
Nov 30 09:35:54 crc kubenswrapper[4941]: I1130 09:35:54.453545 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-9sjwb_11d96cee-acf1-4ada-ae48-e1888bb96c96/operator/0.log"
Nov 30 09:35:54 crc kubenswrapper[4941]: I1130 09:35:54.525824 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-krb82_aaea3a7d-8b16-4e12-a9d3-5653861349b0/perses-operator/0.log"
Nov 30 09:35:54 crc kubenswrapper[4941]: I1130 09:35:54.580051 4941 scope.go:117] "RemoveContainer" containerID="54e9ea0961143e049825abcf1f36ffed71351b657a72d882b8373c5aac20c186"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.405305 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mfgzw"]
Nov 30 09:36:25 crc kubenswrapper[4941]: E1130 09:36:25.406619 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1abd43b7-fa08-4bba-877e-94b77072a6da" containerName="extract-utilities"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.406635 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1abd43b7-fa08-4bba-877e-94b77072a6da" containerName="extract-utilities"
Nov 30 09:36:25 crc kubenswrapper[4941]: E1130 09:36:25.406667 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1abd43b7-fa08-4bba-877e-94b77072a6da" containerName="extract-content"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.406674 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1abd43b7-fa08-4bba-877e-94b77072a6da" containerName="extract-content"
Nov 30 09:36:25 crc kubenswrapper[4941]: E1130 09:36:25.406714 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1abd43b7-fa08-4bba-877e-94b77072a6da" containerName="registry-server"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.406720 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="1abd43b7-fa08-4bba-877e-94b77072a6da" containerName="registry-server"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.406933 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="1abd43b7-fa08-4bba-877e-94b77072a6da" containerName="registry-server"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.408580 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.431555 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mfgzw"]
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.468315 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdvlj\" (UniqueName: \"kubernetes.io/projected/7c25fe12-b67f-4323-8cce-537f603dae3f-kube-api-access-pdvlj\") pod \"community-operators-mfgzw\" (UID: \"7c25fe12-b67f-4323-8cce-537f603dae3f\") " pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.468497 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c25fe12-b67f-4323-8cce-537f603dae3f-utilities\") pod \"community-operators-mfgzw\" (UID: \"7c25fe12-b67f-4323-8cce-537f603dae3f\") " pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.468838 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c25fe12-b67f-4323-8cce-537f603dae3f-catalog-content\") pod \"community-operators-mfgzw\" (UID: \"7c25fe12-b67f-4323-8cce-537f603dae3f\") " pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.570691 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c25fe12-b67f-4323-8cce-537f603dae3f-catalog-content\") pod \"community-operators-mfgzw\" (UID: \"7c25fe12-b67f-4323-8cce-537f603dae3f\") " pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.570789 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdvlj\" (UniqueName: \"kubernetes.io/projected/7c25fe12-b67f-4323-8cce-537f603dae3f-kube-api-access-pdvlj\") pod \"community-operators-mfgzw\" (UID: \"7c25fe12-b67f-4323-8cce-537f603dae3f\") " pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.570944 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c25fe12-b67f-4323-8cce-537f603dae3f-utilities\") pod \"community-operators-mfgzw\" (UID: \"7c25fe12-b67f-4323-8cce-537f603dae3f\") " pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.572135 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c25fe12-b67f-4323-8cce-537f603dae3f-utilities\") pod \"community-operators-mfgzw\" (UID: \"7c25fe12-b67f-4323-8cce-537f603dae3f\") " pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.572494 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c25fe12-b67f-4323-8cce-537f603dae3f-catalog-content\") pod \"community-operators-mfgzw\" (UID: \"7c25fe12-b67f-4323-8cce-537f603dae3f\") " pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.699643 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdvlj\" (UniqueName: \"kubernetes.io/projected/7c25fe12-b67f-4323-8cce-537f603dae3f-kube-api-access-pdvlj\") pod \"community-operators-mfgzw\" (UID: \"7c25fe12-b67f-4323-8cce-537f603dae3f\") " pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:25 crc kubenswrapper[4941]: I1130 09:36:25.752463 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:26 crc kubenswrapper[4941]: I1130 09:36:26.405749 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mfgzw"]
Nov 30 09:36:26 crc kubenswrapper[4941]: W1130 09:36:26.412661 4941 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c25fe12_b67f_4323_8cce_537f603dae3f.slice/crio-45e3672755e652acb8e8808ae649d9a0c80dfeddb00879ea9aced6b1b0f421f3 WatchSource:0}: Error finding container 45e3672755e652acb8e8808ae649d9a0c80dfeddb00879ea9aced6b1b0f421f3: Status 404 returned error can't find the container with id 45e3672755e652acb8e8808ae649d9a0c80dfeddb00879ea9aced6b1b0f421f3
Nov 30 09:36:26 crc kubenswrapper[4941]: I1130 09:36:26.912204 4941 generic.go:334] "Generic (PLEG): container finished" podID="7c25fe12-b67f-4323-8cce-537f603dae3f" containerID="44406460d0631daff32bf75c9238ef0e8944ba6230b7a852e658f51ab906daca" exitCode=0
Nov 30 09:36:26 crc kubenswrapper[4941]: I1130 09:36:26.912771 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mfgzw" event={"ID":"7c25fe12-b67f-4323-8cce-537f603dae3f","Type":"ContainerDied","Data":"44406460d0631daff32bf75c9238ef0e8944ba6230b7a852e658f51ab906daca"}
Nov 30 09:36:26 crc kubenswrapper[4941]: I1130 09:36:26.912812 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mfgzw" event={"ID":"7c25fe12-b67f-4323-8cce-537f603dae3f","Type":"ContainerStarted","Data":"45e3672755e652acb8e8808ae649d9a0c80dfeddb00879ea9aced6b1b0f421f3"}
Nov 30 09:36:27 crc kubenswrapper[4941]: I1130 09:36:27.934393 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mfgzw" event={"ID":"7c25fe12-b67f-4323-8cce-537f603dae3f","Type":"ContainerStarted","Data":"68225861c675c7f58558220d687bce0b2c9dc62effa3379501a0348304f3a964"}
Nov 30 09:36:28 crc kubenswrapper[4941]: I1130 09:36:28.957221 4941 generic.go:334] "Generic (PLEG): container finished" podID="7c25fe12-b67f-4323-8cce-537f603dae3f" containerID="68225861c675c7f58558220d687bce0b2c9dc62effa3379501a0348304f3a964" exitCode=0
Nov 30 09:36:28 crc kubenswrapper[4941]: I1130 09:36:28.957353 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mfgzw" event={"ID":"7c25fe12-b67f-4323-8cce-537f603dae3f","Type":"ContainerDied","Data":"68225861c675c7f58558220d687bce0b2c9dc62effa3379501a0348304f3a964"}
Nov 30 09:36:29 crc kubenswrapper[4941]: I1130 09:36:29.972415 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mfgzw" event={"ID":"7c25fe12-b67f-4323-8cce-537f603dae3f","Type":"ContainerStarted","Data":"4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496"}
Nov 30 09:36:30 crc kubenswrapper[4941]: I1130 09:36:30.002669 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mfgzw" podStartSLOduration=2.451390852 podStartE2EDuration="5.002641043s" podCreationTimestamp="2025-11-30 09:36:25 +0000 UTC" firstStartedPulling="2025-11-30 09:36:26.914386231 +0000 UTC m=+10207.682557840" lastFinishedPulling="2025-11-30 09:36:29.465636412 +0000 UTC m=+10210.233808031" observedRunningTime="2025-11-30 09:36:29.995213483 +0000 UTC m=+10210.763385102" watchObservedRunningTime="2025-11-30 09:36:30.002641043 +0000 UTC m=+10210.770812672"
Nov 30 09:36:35 crc kubenswrapper[4941]: I1130 09:36:35.753313 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:35 crc kubenswrapper[4941]: I1130 09:36:35.754424 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:35 crc kubenswrapper[4941]: I1130 09:36:35.827265 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:36 crc kubenswrapper[4941]: I1130 09:36:36.140248 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:36 crc kubenswrapper[4941]: I1130 09:36:36.208005 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mfgzw"]
Nov 30 09:36:38 crc kubenswrapper[4941]: I1130 09:36:38.101467 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mfgzw" podUID="7c25fe12-b67f-4323-8cce-537f603dae3f" containerName="registry-server" containerID="cri-o://4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496" gracePeriod=2
Nov 30 09:36:38 crc kubenswrapper[4941]: E1130 09:36:38.226663 4941 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c25fe12_b67f_4323_8cce_537f603dae3f.slice/crio-conmon-4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c25fe12_b67f_4323_8cce_537f603dae3f.slice/crio-4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496.scope\": RecentStats: unable to find data in memory cache]"
Nov 30 09:36:38 crc kubenswrapper[4941]: I1130 09:36:38.781311 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:38 crc kubenswrapper[4941]: I1130 09:36:38.894941 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c25fe12-b67f-4323-8cce-537f603dae3f-catalog-content\") pod \"7c25fe12-b67f-4323-8cce-537f603dae3f\" (UID: \"7c25fe12-b67f-4323-8cce-537f603dae3f\") "
Nov 30 09:36:38 crc kubenswrapper[4941]: I1130 09:36:38.895174 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c25fe12-b67f-4323-8cce-537f603dae3f-utilities\") pod \"7c25fe12-b67f-4323-8cce-537f603dae3f\" (UID: \"7c25fe12-b67f-4323-8cce-537f603dae3f\") "
Nov 30 09:36:38 crc kubenswrapper[4941]: I1130 09:36:38.895363 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdvlj\" (UniqueName: \"kubernetes.io/projected/7c25fe12-b67f-4323-8cce-537f603dae3f-kube-api-access-pdvlj\") pod \"7c25fe12-b67f-4323-8cce-537f603dae3f\" (UID: \"7c25fe12-b67f-4323-8cce-537f603dae3f\") "
Nov 30 09:36:38 crc kubenswrapper[4941]: I1130 09:36:38.896590 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c25fe12-b67f-4323-8cce-537f603dae3f-utilities" (OuterVolumeSpecName: "utilities") pod "7c25fe12-b67f-4323-8cce-537f603dae3f" (UID: "7c25fe12-b67f-4323-8cce-537f603dae3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 09:36:38 crc kubenswrapper[4941]: I1130 09:36:38.968979 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c25fe12-b67f-4323-8cce-537f603dae3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7c25fe12-b67f-4323-8cce-537f603dae3f" (UID: "7c25fe12-b67f-4323-8cce-537f603dae3f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 09:36:39 crc kubenswrapper[4941]: I1130 09:36:39.001131 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c25fe12-b67f-4323-8cce-537f603dae3f-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 30 09:36:39 crc kubenswrapper[4941]: I1130 09:36:39.001195 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c25fe12-b67f-4323-8cce-537f603dae3f-utilities\") on node \"crc\" DevicePath \"\""
Nov 30 09:36:39 crc kubenswrapper[4941]: I1130 09:36:39.117310 4941 generic.go:334] "Generic (PLEG): container finished" podID="7c25fe12-b67f-4323-8cce-537f603dae3f" containerID="4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496" exitCode=0
Nov 30 09:36:39 crc kubenswrapper[4941]: I1130 09:36:39.117471 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mfgzw" event={"ID":"7c25fe12-b67f-4323-8cce-537f603dae3f","Type":"ContainerDied","Data":"4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496"}
Nov 30 09:36:39 crc kubenswrapper[4941]: I1130 09:36:39.117518 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mfgzw" event={"ID":"7c25fe12-b67f-4323-8cce-537f603dae3f","Type":"ContainerDied","Data":"45e3672755e652acb8e8808ae649d9a0c80dfeddb00879ea9aced6b1b0f421f3"}
Nov 30 09:36:39 crc kubenswrapper[4941]: I1130 09:36:39.117551 4941 scope.go:117] "RemoveContainer" containerID="4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496"
Nov 30 09:36:39 crc kubenswrapper[4941]: I1130 09:36:39.117752 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mfgzw"
Nov 30 09:36:39 crc kubenswrapper[4941]: I1130 09:36:39.186248 4941 scope.go:117] "RemoveContainer" containerID="68225861c675c7f58558220d687bce0b2c9dc62effa3379501a0348304f3a964"
Nov 30 09:36:39 crc kubenswrapper[4941]: I1130 09:36:39.890483 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c25fe12-b67f-4323-8cce-537f603dae3f-kube-api-access-pdvlj" (OuterVolumeSpecName: "kube-api-access-pdvlj") pod "7c25fe12-b67f-4323-8cce-537f603dae3f" (UID: "7c25fe12-b67f-4323-8cce-537f603dae3f"). InnerVolumeSpecName "kube-api-access-pdvlj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 09:36:39 crc kubenswrapper[4941]: I1130 09:36:39.909384 4941 scope.go:117] "RemoveContainer" containerID="44406460d0631daff32bf75c9238ef0e8944ba6230b7a852e658f51ab906daca"
Nov 30 09:36:39 crc kubenswrapper[4941]: I1130 09:36:39.928004 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdvlj\" (UniqueName: \"kubernetes.io/projected/7c25fe12-b67f-4323-8cce-537f603dae3f-kube-api-access-pdvlj\") on node \"crc\" DevicePath \"\""
Nov 30 09:36:40 crc kubenswrapper[4941]: I1130 09:36:40.080103 4941 scope.go:117] "RemoveContainer" containerID="4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496"
Nov 30 09:36:40 crc kubenswrapper[4941]: E1130 09:36:40.080719 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496\": container with ID starting with 4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496 not found: ID does not exist" containerID="4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496"
Nov 30 09:36:40 crc kubenswrapper[4941]: I1130 09:36:40.080761 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496"} err="failed to get container status \"4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496\": rpc error: code = NotFound desc = could not find container \"4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496\": container with ID starting with 4110512cf03abb0b98f4603195338fa39bd36e79f3c4f8f4fcf3376a283e8496 not found: ID does not exist"
Nov 30 09:36:40 crc kubenswrapper[4941]: I1130 09:36:40.080786 4941 scope.go:117] "RemoveContainer" containerID="68225861c675c7f58558220d687bce0b2c9dc62effa3379501a0348304f3a964"
Nov 30 09:36:40 crc kubenswrapper[4941]: E1130 09:36:40.081747 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68225861c675c7f58558220d687bce0b2c9dc62effa3379501a0348304f3a964\": container with ID starting with 68225861c675c7f58558220d687bce0b2c9dc62effa3379501a0348304f3a964 not found: ID does not exist" containerID="68225861c675c7f58558220d687bce0b2c9dc62effa3379501a0348304f3a964"
Nov 30 09:36:40 crc kubenswrapper[4941]: I1130 09:36:40.081772 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68225861c675c7f58558220d687bce0b2c9dc62effa3379501a0348304f3a964"} err="failed to get container status \"68225861c675c7f58558220d687bce0b2c9dc62effa3379501a0348304f3a964\": rpc error: code = NotFound desc = could not find container \"68225861c675c7f58558220d687bce0b2c9dc62effa3379501a0348304f3a964\": container with ID starting with 68225861c675c7f58558220d687bce0b2c9dc62effa3379501a0348304f3a964 not found: ID does not exist"
Nov 30 09:36:40 crc kubenswrapper[4941]: I1130 09:36:40.081789 4941 scope.go:117] "RemoveContainer" containerID="44406460d0631daff32bf75c9238ef0e8944ba6230b7a852e658f51ab906daca"
Nov 30 09:36:40 crc kubenswrapper[4941]: E1130 09:36:40.082118 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44406460d0631daff32bf75c9238ef0e8944ba6230b7a852e658f51ab906daca\": container with ID starting with 44406460d0631daff32bf75c9238ef0e8944ba6230b7a852e658f51ab906daca not found: ID does not exist" containerID="44406460d0631daff32bf75c9238ef0e8944ba6230b7a852e658f51ab906daca"
Nov 30 09:36:40 crc kubenswrapper[4941]: I1130 09:36:40.082168 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44406460d0631daff32bf75c9238ef0e8944ba6230b7a852e658f51ab906daca"} err="failed to get container status \"44406460d0631daff32bf75c9238ef0e8944ba6230b7a852e658f51ab906daca\": rpc error: code = NotFound desc = could not find container \"44406460d0631daff32bf75c9238ef0e8944ba6230b7a852e658f51ab906daca\": container with ID starting with 44406460d0631daff32bf75c9238ef0e8944ba6230b7a852e658f51ab906daca not found: ID does not exist"
Nov 30 09:36:40 crc kubenswrapper[4941]: I1130 09:36:40.134572 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mfgzw"]
Nov 30 09:36:40 crc kubenswrapper[4941]: I1130 09:36:40.147803 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mfgzw"]
Nov 30 09:36:41 crc kubenswrapper[4941]: I1130 09:36:41.542245 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c25fe12-b67f-4323-8cce-537f603dae3f" path="/var/lib/kubelet/pods/7c25fe12-b67f-4323-8cce-537f603dae3f/volumes"
Nov 30 09:37:32 crc kubenswrapper[4941]: I1130 09:37:32.979429 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 09:37:32 crc kubenswrapper[4941]: I1130 09:37:32.980280 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 09:38:02 crc kubenswrapper[4941]: I1130 09:38:02.979493 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 09:38:02 crc kubenswrapper[4941]: I1130 09:38:02.980226 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 09:38:17 crc kubenswrapper[4941]: I1130 09:38:17.550301 4941 generic.go:334] "Generic (PLEG): container finished" podID="4615cc80-a7a5-4bee-928f-bf978c248d8d" containerID="0a66335a296f4691caed04ecbae0d10673a73c80b4dc1f2e40ff1f86c85a2177" exitCode=0
Nov 30 09:38:17 crc kubenswrapper[4941]: I1130 09:38:17.557024 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z2zpj/must-gather-28nwg" event={"ID":"4615cc80-a7a5-4bee-928f-bf978c248d8d","Type":"ContainerDied","Data":"0a66335a296f4691caed04ecbae0d10673a73c80b4dc1f2e40ff1f86c85a2177"}
Nov 30 09:38:17 crc kubenswrapper[4941]: I1130 09:38:17.584146 4941 scope.go:117] "RemoveContainer" containerID="0a66335a296f4691caed04ecbae0d10673a73c80b4dc1f2e40ff1f86c85a2177"
Nov 30 09:38:17 crc kubenswrapper[4941]: I1130 09:38:17.685759 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-z2zpj_must-gather-28nwg_4615cc80-a7a5-4bee-928f-bf978c248d8d/gather/0.log"
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.246356 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-z2zpj/must-gather-28nwg"]
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.247608 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-z2zpj/must-gather-28nwg" podUID="4615cc80-a7a5-4bee-928f-bf978c248d8d" containerName="copy" containerID="cri-o://ad7d847fb41ebd999ef658eed6127ef7ed7283ac1e307c67a3cdcd880a6991f3" gracePeriod=2
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.303589 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-z2zpj/must-gather-28nwg"]
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.719917 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-z2zpj_must-gather-28nwg_4615cc80-a7a5-4bee-928f-bf978c248d8d/copy/0.log"
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.720444 4941 generic.go:334] "Generic (PLEG): container finished" podID="4615cc80-a7a5-4bee-928f-bf978c248d8d" containerID="ad7d847fb41ebd999ef658eed6127ef7ed7283ac1e307c67a3cdcd880a6991f3" exitCode=143
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.720533 4941 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db0519a6898aefb47da51ff07b25168acaef5182a26bfe053906523d92bafce5"
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.748316 4941 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-z2zpj_must-gather-28nwg_4615cc80-a7a5-4bee-928f-bf978c248d8d/copy/0.log"
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.748712 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-z2zpj/must-gather-28nwg"
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.802120 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4615cc80-a7a5-4bee-928f-bf978c248d8d-must-gather-output\") pod \"4615cc80-a7a5-4bee-928f-bf978c248d8d\" (UID: \"4615cc80-a7a5-4bee-928f-bf978c248d8d\") "
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.802380 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szd9c\" (UniqueName: \"kubernetes.io/projected/4615cc80-a7a5-4bee-928f-bf978c248d8d-kube-api-access-szd9c\") pod \"4615cc80-a7a5-4bee-928f-bf978c248d8d\" (UID: \"4615cc80-a7a5-4bee-928f-bf978c248d8d\") "
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.812193 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4615cc80-a7a5-4bee-928f-bf978c248d8d-kube-api-access-szd9c" (OuterVolumeSpecName: "kube-api-access-szd9c") pod "4615cc80-a7a5-4bee-928f-bf978c248d8d" (UID: "4615cc80-a7a5-4bee-928f-bf978c248d8d"). InnerVolumeSpecName "kube-api-access-szd9c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.905341 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szd9c\" (UniqueName: \"kubernetes.io/projected/4615cc80-a7a5-4bee-928f-bf978c248d8d-kube-api-access-szd9c\") on node \"crc\" DevicePath \"\""
Nov 30 09:38:27 crc kubenswrapper[4941]: I1130 09:38:27.991996 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4615cc80-a7a5-4bee-928f-bf978c248d8d-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "4615cc80-a7a5-4bee-928f-bf978c248d8d" (UID: "4615cc80-a7a5-4bee-928f-bf978c248d8d"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 30 09:38:28 crc kubenswrapper[4941]: I1130 09:38:28.008298 4941 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4615cc80-a7a5-4bee-928f-bf978c248d8d-must-gather-output\") on node \"crc\" DevicePath \"\""
Nov 30 09:38:28 crc kubenswrapper[4941]: I1130 09:38:28.731402 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-z2zpj/must-gather-28nwg"
Nov 30 09:38:29 crc kubenswrapper[4941]: I1130 09:38:29.543544 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4615cc80-a7a5-4bee-928f-bf978c248d8d" path="/var/lib/kubelet/pods/4615cc80-a7a5-4bee-928f-bf978c248d8d/volumes"
Nov 30 09:38:32 crc kubenswrapper[4941]: I1130 09:38:32.979133 4941 patch_prober.go:28] interesting pod/machine-config-daemon-5pscg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 30 09:38:32 crc kubenswrapper[4941]: I1130 09:38:32.980147 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 30 09:38:32 crc kubenswrapper[4941]: I1130 09:38:32.980220 4941 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5pscg"
Nov 30 09:38:32 crc kubenswrapper[4941]: I1130 09:38:32.981479 4941 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc"} pod="openshift-machine-config-operator/machine-config-daemon-5pscg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 30 09:38:32 crc kubenswrapper[4941]: I1130 09:38:32.981545 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerName="machine-config-daemon" containerID="cri-o://00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" gracePeriod=600
Nov 30 09:38:33 crc kubenswrapper[4941]: E1130 09:38:33.113028 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:38:33 crc kubenswrapper[4941]: I1130 09:38:33.800259 4941 generic.go:334] "Generic (PLEG): container finished" podID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" exitCode=0
Nov 30 09:38:33 crc kubenswrapper[4941]: I1130 09:38:33.800394 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" event={"ID":"6d39a3a2-8387-4108-aad6-3bfd59ad0018","Type":"ContainerDied","Data":"00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc"}
Nov 30 09:38:33 crc kubenswrapper[4941]: I1130 09:38:33.800487 4941 scope.go:117] "RemoveContainer" containerID="753d67d459852b50e5ca02ffb3cea393cf13d47c8faaf0defaa8dd4fdd572223"
Nov 30 09:38:33 crc kubenswrapper[4941]: I1130 09:38:33.801768 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc"
Nov 30 09:38:33 crc kubenswrapper[4941]: E1130 09:38:33.802239 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:38:47 crc kubenswrapper[4941]: I1130 09:38:47.522768 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc"
Nov 30 09:38:47 crc kubenswrapper[4941]: E1130 09:38:47.524018 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:38:54 crc kubenswrapper[4941]: I1130 09:38:54.720843 4941 scope.go:117] "RemoveContainer" containerID="0a66335a296f4691caed04ecbae0d10673a73c80b4dc1f2e40ff1f86c85a2177"
Nov 30 09:38:54 crc kubenswrapper[4941]: I1130 09:38:54.773088 4941 scope.go:117] "RemoveContainer" containerID="ad7d847fb41ebd999ef658eed6127ef7ed7283ac1e307c67a3cdcd880a6991f3"
Nov 30 09:38:58 crc kubenswrapper[4941]: I1130 09:38:58.522098 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc"
Nov 30 09:38:58 crc kubenswrapper[4941]: E1130 09:38:58.522983 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018"
Nov 30 09:39:11 crc kubenswrapper[4941]: I1130 09:39:11.522822 4941 scope.go:117] "RemoveContainer"
containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" Nov 30 09:39:11 crc kubenswrapper[4941]: E1130 09:39:11.524590 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:39:26 crc kubenswrapper[4941]: I1130 09:39:26.522495 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" Nov 30 09:39:26 crc kubenswrapper[4941]: E1130 09:39:26.523668 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:39:37 crc kubenswrapper[4941]: I1130 09:39:37.523706 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" Nov 30 09:39:37 crc kubenswrapper[4941]: E1130 09:39:37.525035 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:39:49 crc kubenswrapper[4941]: I1130 09:39:49.790359 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="69a33c95-4877-49f2-9bcc-59d5c750a626" containerName="galera" probeResult="failure" output="command timed out" Nov 30 09:39:49 crc kubenswrapper[4941]: I1130 09:39:49.790442 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-cell1-galera-0" podUID="69a33c95-4877-49f2-9bcc-59d5c750a626" containerName="galera" probeResult="failure" output="command timed out" Nov 30 09:39:50 crc kubenswrapper[4941]: I1130 09:39:50.523600 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" Nov 30 09:39:50 crc kubenswrapper[4941]: E1130 09:39:50.527201 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:40:04 crc kubenswrapper[4941]: I1130 09:40:04.521501 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" Nov 30 09:40:04 crc kubenswrapper[4941]: E1130 09:40:04.522787 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:40:16 crc kubenswrapper[4941]: I1130 09:40:16.523338 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" Nov 30 09:40:16 crc kubenswrapper[4941]: E1130 09:40:16.524348 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:40:29 crc kubenswrapper[4941]: I1130 09:40:29.539968 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" Nov 30 09:40:29 crc kubenswrapper[4941]: E1130 09:40:29.541716 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:40:44 crc kubenswrapper[4941]: I1130 09:40:44.522911 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" Nov 30 09:40:44 crc kubenswrapper[4941]: E1130 09:40:44.524097 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:40:58 crc kubenswrapper[4941]: I1130 09:40:58.522226 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" Nov 30 09:40:58 crc kubenswrapper[4941]: E1130 09:40:58.523733 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:41:07 crc kubenswrapper[4941]: I1130 09:41:07.962122 4941 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gwpsr"] Nov 30 09:41:07 crc kubenswrapper[4941]: E1130 09:41:07.964292 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4615cc80-a7a5-4bee-928f-bf978c248d8d" containerName="gather" Nov 30 09:41:07 crc kubenswrapper[4941]: I1130 09:41:07.964356 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4615cc80-a7a5-4bee-928f-bf978c248d8d" containerName="gather" Nov 30 09:41:07 crc kubenswrapper[4941]: E1130 
09:41:07.964382 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c25fe12-b67f-4323-8cce-537f603dae3f" containerName="extract-content" Nov 30 09:41:07 crc kubenswrapper[4941]: I1130 09:41:07.964396 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c25fe12-b67f-4323-8cce-537f603dae3f" containerName="extract-content" Nov 30 09:41:07 crc kubenswrapper[4941]: E1130 09:41:07.964434 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4615cc80-a7a5-4bee-928f-bf978c248d8d" containerName="copy" Nov 30 09:41:07 crc kubenswrapper[4941]: I1130 09:41:07.964449 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="4615cc80-a7a5-4bee-928f-bf978c248d8d" containerName="copy" Nov 30 09:41:07 crc kubenswrapper[4941]: E1130 09:41:07.964489 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c25fe12-b67f-4323-8cce-537f603dae3f" containerName="extract-utilities" Nov 30 09:41:07 crc kubenswrapper[4941]: I1130 09:41:07.964506 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c25fe12-b67f-4323-8cce-537f603dae3f" containerName="extract-utilities" Nov 30 09:41:07 crc kubenswrapper[4941]: E1130 09:41:07.964533 4941 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c25fe12-b67f-4323-8cce-537f603dae3f" containerName="registry-server" Nov 30 09:41:07 crc kubenswrapper[4941]: I1130 09:41:07.964548 4941 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c25fe12-b67f-4323-8cce-537f603dae3f" containerName="registry-server" Nov 30 09:41:07 crc kubenswrapper[4941]: I1130 09:41:07.964993 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4615cc80-a7a5-4bee-928f-bf978c248d8d" containerName="copy" Nov 30 09:41:07 crc kubenswrapper[4941]: I1130 09:41:07.965032 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="4615cc80-a7a5-4bee-928f-bf978c248d8d" containerName="gather" Nov 30 09:41:07 crc kubenswrapper[4941]: I1130 09:41:07.965061 4941 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c25fe12-b67f-4323-8cce-537f603dae3f" containerName="registry-server" Nov 30 09:41:07 crc kubenswrapper[4941]: I1130 09:41:07.968447 4941 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:07 crc kubenswrapper[4941]: I1130 09:41:07.980622 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gwpsr"] Nov 30 09:41:08 crc kubenswrapper[4941]: I1130 09:41:08.097383 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-catalog-content\") pod \"redhat-operators-gwpsr\" (UID: \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\") " pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:08 crc kubenswrapper[4941]: I1130 09:41:08.097918 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-utilities\") pod \"redhat-operators-gwpsr\" (UID: \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\") " pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:08 crc kubenswrapper[4941]: I1130 09:41:08.097952 4941 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv4hn\" (UniqueName: \"kubernetes.io/projected/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-kube-api-access-rv4hn\") pod \"redhat-operators-gwpsr\" (UID: \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\") " pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:08 crc kubenswrapper[4941]: I1130 09:41:08.200474 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-catalog-content\") pod \"redhat-operators-gwpsr\" (UID: \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\") " pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:08 crc kubenswrapper[4941]: I1130 09:41:08.200613 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-utilities\") pod \"redhat-operators-gwpsr\" (UID: \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\") " pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:08 crc kubenswrapper[4941]: I1130 09:41:08.200648 4941 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv4hn\" (UniqueName: \"kubernetes.io/projected/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-kube-api-access-rv4hn\") pod \"redhat-operators-gwpsr\" (UID: \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\") " pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:08 crc kubenswrapper[4941]: I1130 09:41:08.201125 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-catalog-content\") pod \"redhat-operators-gwpsr\" (UID: \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\") " pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:08 crc kubenswrapper[4941]: I1130 09:41:08.201205 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-utilities\") pod \"redhat-operators-gwpsr\" (UID: \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\") " pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:08 crc kubenswrapper[4941]: I1130 09:41:08.228722 4941 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-rv4hn\" (UniqueName: \"kubernetes.io/projected/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-kube-api-access-rv4hn\") pod \"redhat-operators-gwpsr\" (UID: \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\") " pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:08 crc kubenswrapper[4941]: I1130 09:41:08.314269 4941 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:08 crc kubenswrapper[4941]: I1130 09:41:08.841315 4941 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gwpsr"] Nov 30 09:41:08 crc kubenswrapper[4941]: I1130 09:41:08.927914 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpsr" event={"ID":"2aef6a85-7a84-432b-9e14-2f5a57fd00d9","Type":"ContainerStarted","Data":"d2af2dc18d0e088f2fa7b289fa4defdaddbfba128a3221d26997db0b4f5c7c52"} Nov 30 09:41:09 crc kubenswrapper[4941]: I1130 09:41:09.948787 4941 generic.go:334] "Generic (PLEG): container finished" podID="2aef6a85-7a84-432b-9e14-2f5a57fd00d9" containerID="6b9433cb22656c364c596909d65339d30240ed41c0ae59a96a64da6d641de54a" exitCode=0 Nov 30 09:41:09 crc kubenswrapper[4941]: I1130 09:41:09.948859 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpsr" event={"ID":"2aef6a85-7a84-432b-9e14-2f5a57fd00d9","Type":"ContainerDied","Data":"6b9433cb22656c364c596909d65339d30240ed41c0ae59a96a64da6d641de54a"} Nov 30 09:41:09 crc kubenswrapper[4941]: I1130 09:41:09.953201 4941 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 30 09:41:10 crc kubenswrapper[4941]: I1130 09:41:10.523395 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" Nov 30 09:41:10 crc kubenswrapper[4941]: E1130 09:41:10.524475 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:41:10 crc kubenswrapper[4941]: I1130 09:41:10.966372 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpsr" event={"ID":"2aef6a85-7a84-432b-9e14-2f5a57fd00d9","Type":"ContainerStarted","Data":"e3a902530c5623cb3d2a6fb4be8d0fe0251e7281c06a35ed897f1e59f637c21c"} Nov 30 09:41:14 crc kubenswrapper[4941]: I1130 09:41:14.005675 4941 generic.go:334] "Generic (PLEG): container finished" podID="2aef6a85-7a84-432b-9e14-2f5a57fd00d9" containerID="e3a902530c5623cb3d2a6fb4be8d0fe0251e7281c06a35ed897f1e59f637c21c" exitCode=0 Nov 30 09:41:14 crc kubenswrapper[4941]: I1130 09:41:14.005765 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpsr" event={"ID":"2aef6a85-7a84-432b-9e14-2f5a57fd00d9","Type":"ContainerDied","Data":"e3a902530c5623cb3d2a6fb4be8d0fe0251e7281c06a35ed897f1e59f637c21c"} Nov 30 09:41:15 crc kubenswrapper[4941]: I1130 09:41:15.022899 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpsr" 
event={"ID":"2aef6a85-7a84-432b-9e14-2f5a57fd00d9","Type":"ContainerStarted","Data":"db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7"} Nov 30 09:41:15 crc kubenswrapper[4941]: I1130 09:41:15.071002 4941 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gwpsr" podStartSLOduration=3.454160968 podStartE2EDuration="8.070967698s" podCreationTimestamp="2025-11-30 09:41:07 +0000 UTC" firstStartedPulling="2025-11-30 09:41:09.951875916 +0000 UTC m=+10490.720047545" lastFinishedPulling="2025-11-30 09:41:14.568682666 +0000 UTC m=+10495.336854275" observedRunningTime="2025-11-30 09:41:15.050557385 +0000 UTC m=+10495.818728984" watchObservedRunningTime="2025-11-30 09:41:15.070967698 +0000 UTC m=+10495.839139337" Nov 30 09:41:18 crc kubenswrapper[4941]: I1130 09:41:18.314709 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:18 crc kubenswrapper[4941]: I1130 09:41:18.315777 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:19 crc kubenswrapper[4941]: I1130 09:41:19.371124 4941 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gwpsr" podUID="2aef6a85-7a84-432b-9e14-2f5a57fd00d9" containerName="registry-server" probeResult="failure" output=< Nov 30 09:41:19 crc kubenswrapper[4941]: timeout: failed to connect service ":50051" within 1s Nov 30 09:41:19 crc kubenswrapper[4941]: > Nov 30 09:41:19 crc kubenswrapper[4941]: I1130 09:41:19.790123 4941 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-cell1-galera-0" podUID="69a33c95-4877-49f2-9bcc-59d5c750a626" containerName="galera" probeResult="failure" output="command timed out" Nov 30 09:41:19 crc kubenswrapper[4941]: I1130 09:41:19.792603 4941 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="69a33c95-4877-49f2-9bcc-59d5c750a626" containerName="galera" probeResult="failure" output="command timed out" Nov 30 09:41:24 crc kubenswrapper[4941]: I1130 09:41:24.524285 4941 scope.go:117] "RemoveContainer" containerID="00fa207f579bec6cadeb08c3531d3dcd1397c31c6cf0f50529639fa0443a47bc" Nov 30 09:41:24 crc kubenswrapper[4941]: E1130 09:41:24.525691 4941 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5pscg_openshift-machine-config-operator(6d39a3a2-8387-4108-aad6-3bfd59ad0018)\"" pod="openshift-machine-config-operator/machine-config-daemon-5pscg" podUID="6d39a3a2-8387-4108-aad6-3bfd59ad0018" Nov 30 09:41:28 crc kubenswrapper[4941]: I1130 09:41:28.401228 4941 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:28 crc kubenswrapper[4941]: I1130 09:41:28.475976 4941 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:28 crc kubenswrapper[4941]: I1130 09:41:28.651390 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gwpsr"] Nov 30 09:41:30 crc kubenswrapper[4941]: I1130 09:41:30.253793 4941 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gwpsr" 
podUID="2aef6a85-7a84-432b-9e14-2f5a57fd00d9" containerName="registry-server" containerID="cri-o://db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7" gracePeriod=2 Nov 30 09:41:30 crc kubenswrapper[4941]: I1130 09:41:30.781587 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:30 crc kubenswrapper[4941]: I1130 09:41:30.868800 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv4hn\" (UniqueName: \"kubernetes.io/projected/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-kube-api-access-rv4hn\") pod \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\" (UID: \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\") " Nov 30 09:41:30 crc kubenswrapper[4941]: I1130 09:41:30.869150 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-catalog-content\") pod \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\" (UID: \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\") " Nov 30 09:41:30 crc kubenswrapper[4941]: I1130 09:41:30.869237 4941 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-utilities\") pod \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\" (UID: \"2aef6a85-7a84-432b-9e14-2f5a57fd00d9\") " Nov 30 09:41:30 crc kubenswrapper[4941]: I1130 09:41:30.869974 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-utilities" (OuterVolumeSpecName: "utilities") pod "2aef6a85-7a84-432b-9e14-2f5a57fd00d9" (UID: "2aef6a85-7a84-432b-9e14-2f5a57fd00d9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:41:30 crc kubenswrapper[4941]: I1130 09:41:30.876799 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-kube-api-access-rv4hn" (OuterVolumeSpecName: "kube-api-access-rv4hn") pod "2aef6a85-7a84-432b-9e14-2f5a57fd00d9" (UID: "2aef6a85-7a84-432b-9e14-2f5a57fd00d9"). InnerVolumeSpecName "kube-api-access-rv4hn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 30 09:41:30 crc kubenswrapper[4941]: I1130 09:41:30.971833 4941 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-utilities\") on node \"crc\" DevicePath \"\"" Nov 30 09:41:30 crc kubenswrapper[4941]: I1130 09:41:30.971874 4941 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv4hn\" (UniqueName: \"kubernetes.io/projected/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-kube-api-access-rv4hn\") on node \"crc\" DevicePath \"\"" Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.003407 4941 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2aef6a85-7a84-432b-9e14-2f5a57fd00d9" (UID: "2aef6a85-7a84-432b-9e14-2f5a57fd00d9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.075036 4941 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aef6a85-7a84-432b-9e14-2f5a57fd00d9-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.267702 4941 generic.go:334] "Generic (PLEG): container finished" podID="2aef6a85-7a84-432b-9e14-2f5a57fd00d9" containerID="db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7" exitCode=0 Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.267776 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpsr" event={"ID":"2aef6a85-7a84-432b-9e14-2f5a57fd00d9","Type":"ContainerDied","Data":"db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7"} Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.267844 4941 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gwpsr" Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.268453 4941 scope.go:117] "RemoveContainer" containerID="db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7" Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.274562 4941 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwpsr" event={"ID":"2aef6a85-7a84-432b-9e14-2f5a57fd00d9","Type":"ContainerDied","Data":"d2af2dc18d0e088f2fa7b289fa4defdaddbfba128a3221d26997db0b4f5c7c52"} Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.299057 4941 scope.go:117] "RemoveContainer" containerID="e3a902530c5623cb3d2a6fb4be8d0fe0251e7281c06a35ed897f1e59f637c21c" Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.324068 4941 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gwpsr"] Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.330878 4941 scope.go:117] "RemoveContainer" containerID="6b9433cb22656c364c596909d65339d30240ed41c0ae59a96a64da6d641de54a" Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.338549 4941 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gwpsr"] Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.405672 4941 scope.go:117] "RemoveContainer" containerID="db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7" Nov 30 09:41:31 crc kubenswrapper[4941]: E1130 09:41:31.406710 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7\": container with ID starting with db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7 not found: ID does not exist" containerID="db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7" Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.406751 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7"} err="failed to get container status \"db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7\": rpc error: code = NotFound desc = could not find container \"db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7\": container with ID starting with db8714ca170c0b1a896df06e5907afe50adf6df09c849fe924bf2b9d6ce35ba7 not found: ID does not exist" Nov 30 09:41:31 crc 
kubenswrapper[4941]: I1130 09:41:31.406774 4941 scope.go:117] "RemoveContainer" containerID="e3a902530c5623cb3d2a6fb4be8d0fe0251e7281c06a35ed897f1e59f637c21c" Nov 30 09:41:31 crc kubenswrapper[4941]: E1130 09:41:31.407164 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3a902530c5623cb3d2a6fb4be8d0fe0251e7281c06a35ed897f1e59f637c21c\": container with ID starting with e3a902530c5623cb3d2a6fb4be8d0fe0251e7281c06a35ed897f1e59f637c21c not found: ID does not exist" containerID="e3a902530c5623cb3d2a6fb4be8d0fe0251e7281c06a35ed897f1e59f637c21c" Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.407229 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3a902530c5623cb3d2a6fb4be8d0fe0251e7281c06a35ed897f1e59f637c21c"} err="failed to get container status \"e3a902530c5623cb3d2a6fb4be8d0fe0251e7281c06a35ed897f1e59f637c21c\": rpc error: code = NotFound desc = could not find container \"e3a902530c5623cb3d2a6fb4be8d0fe0251e7281c06a35ed897f1e59f637c21c\": container with ID starting with e3a902530c5623cb3d2a6fb4be8d0fe0251e7281c06a35ed897f1e59f637c21c not found: ID does not exist" Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.407267 4941 scope.go:117] "RemoveContainer" containerID="6b9433cb22656c364c596909d65339d30240ed41c0ae59a96a64da6d641de54a" Nov 30 09:41:31 crc kubenswrapper[4941]: E1130 09:41:31.407697 4941 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b9433cb22656c364c596909d65339d30240ed41c0ae59a96a64da6d641de54a\": container with ID starting with 6b9433cb22656c364c596909d65339d30240ed41c0ae59a96a64da6d641de54a not found: ID does not exist" containerID="6b9433cb22656c364c596909d65339d30240ed41c0ae59a96a64da6d641de54a" Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.407728 4941 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b9433cb22656c364c596909d65339d30240ed41c0ae59a96a64da6d641de54a"} err="failed to get container status \"6b9433cb22656c364c596909d65339d30240ed41c0ae59a96a64da6d641de54a\": rpc error: code = NotFound desc = could not find container \"6b9433cb22656c364c596909d65339d30240ed41c0ae59a96a64da6d641de54a\": container with ID starting with 6b9433cb22656c364c596909d65339d30240ed41c0ae59a96a64da6d641de54a not found: ID does not exist" Nov 30 09:41:31 crc kubenswrapper[4941]: I1130 09:41:31.535164 4941 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2aef6a85-7a84-432b-9e14-2f5a57fd00d9" path="/var/lib/kubelet/pods/2aef6a85-7a84-432b-9e14-2f5a57fd00d9/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515113010522024433 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015113010523017351 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015112763541016512 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015112763542015463 5ustar corecore